diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index 1b3a16eebd2c..b5cf5bb4dc80 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -5,51 +5,41 @@ labels: "bug" --- **Bug Report** (A clear and concise description of what the bug is.) **To Reproduce** -(Write your steps here:) - -1. Step 1... -2. Step 2... -3. Step 3... +```python +# Ideally, a small sample program that demonstrates the problem. +# Or even better, a reproducible playground link https://mypy-play.net/ (use the "Gist" button) +``` **Expected Behavior** -(Write what you thought would happen.) - **Actual Behavior** - - -(Write what happened.) + **Your Environment** @@ -59,9 +49,5 @@ for this report: https://github.com/python/typeshed/issues - Mypy command-line flags: - Mypy configuration options from `mypy.ini` (and other config files): - Python version used: -- Operating system and version: - + diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 4794ec05c906..696eb8aee125 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,22 +1,12 @@ -### Have you read the [Contributing Guidelines](https://github.com/python/mypy/blob/master/CONTRIBUTING.md)? - -(Once you have, delete this section. If you leave it in, your PR may be closed without action.) - -### Description - - + (Explain how this PR changes mypy.) -## Test Plan - - -(Write your test plan here. If you changed any code, please provide us with clear instructions on how you verified your changes work.) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 9f984e3a346b..a3294c08a79c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,8 +26,8 @@ jobs: with: python-version: '3.7' - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 - name: Setup tox environment - run: tox -e ${{ env.TOXENV }} --notest + run: tox run -e ${{ env.TOXENV }} --notest - name: Test - run: tox -e ${{ env.TOXENV }} --skip-pkg-install + run: tox run -e ${{ env.TOXENV }} --skip-pkg-install diff --git a/.github/workflows/mypy_primer.yml b/.github/workflows/mypy_primer.yml index 59ee859f1414..e7e4af1f07b7 100644 --- a/.github/workflows/mypy_primer.yml +++ b/.github/workflows/mypy_primer.yml @@ -13,9 +13,12 @@ on: - 'mypy/stubgen.py' - 'mypy/stubgenc.py' - 'mypy/test/**' - - 'scripts/**' - 'test-data/**' +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + jobs: mypy_primer: name: Run mypy_primer @@ -24,7 +27,7 @@ jobs: contents: read strategy: matrix: - shard-index: [0, 1, 2] + shard-index: [0, 1, 2, 3, 4] fail-fast: false steps: - uses: actions/checkout@v3 @@ -45,7 +48,7 @@ jobs: echo "new commit" git rev-list --format=%s --max-count=1 $GITHUB_SHA - MERGE_BASE=$(git merge-base $GITHUB_SHA origin/master) + MERGE_BASE=$(git merge-base $GITHUB_SHA origin/$GITHUB_BASE_REF) git checkout -b base_commit $MERGE_BASE echo "base commit" git rev-list --format=%s --max-count=1 base_commit @@ -57,8 +60,9 @@ jobs: mypy_primer \ --repo mypy_to_test \ --new $GITHUB_SHA --old base_commit \ - --num-shards 3 --shard-index ${{ matrix.shard-index }} \ + --num-shards 5 --shard-index ${{ matrix.shard-index }} \ --debug \ + --additional-flags="--debug-serialize" \ --output concise \ | tee diff_${{ matrix.shard-index }}.txt ) || [ $? 
-eq 1 ] diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index 94d387fb7da0..12ce91c12910 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -15,6 +15,7 @@ jobs: comment: name: Comment PR from mypy_primer runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} steps: - name: Download diffs uses: actions/github-script@v6 @@ -47,15 +48,29 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | + const MAX_CHARACTERS = 30000 + const MAX_CHARACTERS_PER_PROJECT = MAX_CHARACTERS / 3 + const fs = require('fs') let data = fs.readFileSync('fulldiff.txt', { encoding: 'utf8' }) - // posting comment fails if too long, so truncate - if (data.length > 30000) { - let truncated_data = data.substring(0, 30000) - let lines_truncated = data.split('\n').length - truncated_data.split('\n').length - data = truncated_data + `\n\n... (truncated ${lines_truncated} lines) ...\n` + + function truncateIfNeeded(original, maxLength) { + if (original.length <= maxLength) { + return original + } + let truncated = original.substring(0, maxLength) + // further, remove last line that might be truncated + truncated = truncated.substring(0, truncated.lastIndexOf('\n')) + let lines_truncated = original.split('\n').length - truncated.split('\n').length + return `${truncated}\n\n... (truncated ${lines_truncated} lines) ...` } + const projects = data.split('\n\n') + // don't let one project dominate + data = projects.map(project => truncateIfNeeded(project, MAX_CHARACTERS_PER_PROJECT)).join('\n\n') + // posting comment fails if too long, so truncate + data = truncateIfNeeded(data, MAX_CHARACTERS) + console.log("Diff from mypy_primer:") console.log(data) @@ -75,8 +90,8 @@ jobs: return prNumber - name: Hide old comments - # v0.3.0 - uses: kanga333/comment-hider@bbdf5b562fbec24e6f60572d8f712017428b92e0 + # v0.4.0 + uses: kanga333/comment-hider@c12bb20b48aeb8fc098e35967de8d4f8018fffdf with: github_token: ${{ secrets.GITHUB_TOKEN }} leave_visible: 1 diff --git a/.github/workflows/sync_typeshed.yml b/.github/workflows/sync_typeshed.yml new file mode 100644 index 000000000000..1db2e846f099 --- /dev/null +++ b/.github/workflows/sync_typeshed.yml @@ -0,0 +1,33 @@ +name: Sync typeshed + +on: + workflow_dispatch: + schedule: + - cron: "0 0 1,15 * *" + +permissions: + contents: write + pull-requests: write + +jobs: + sync_typeshed: + name: Sync typeshed + if: github.repository == 'python/mypy' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + # TODO: use whatever solution ends up working for + # https://github.com/python/typeshed/issues/8434 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: git config + run: | + git config --global user.name mypybot + git config --global user.email '<>' + - name: Sync typeshed + run: | + python -m pip install requests==2.28.1 + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} python misc/sync-typeshed.py --make-pr diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e8f8a2a05e2b..ed0c82ef5fa1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -15,7 +15,7 @@ on: - CREDITS - LICENSE -concurrency: +concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true @@ -26,11 +26,6 @@ jobs: fail-fast: false matrix: include: - - name: Test suite with py37-windows-32 - python: '3.7' - arch: x86 - os: windows-latest - toxenv: 
py37 - name: Test suite with py37-windows-64 python: '3.7' arch: x64 @@ -68,6 +63,13 @@ jobs: os: ubuntu-latest toxenv: py tox_extra_args: "-n 2" + - name: Test suite with py311-ubuntu, mypyc-compiled + python: '3.11' + arch: x64 + os: ubuntu-latest + toxenv: py + tox_extra_args: "-n 2" + test_mypyc: true - name: mypyc runtime tests with py37-macos python: '3.7' arch: x64 @@ -100,6 +102,16 @@ jobs: name: ${{ matrix.name }} env: TOX_SKIP_MISSING_INTERPRETERS: False + # Rich (pip) + FORCE_COLOR: 1 + # Tox + PY_COLORS: 1 + # Mypy (see https://github.com/python/mypy/issues/7771) + TERM: xterm-color + MYPY_FORCE_COLOR: 1 + MYPY_FORCE_TERMINAL_WIDTH: 200 + # Pytest + PYTEST_ADDOPTS: --color=yes steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 @@ -115,31 +127,77 @@ jobs: ./misc/build-debug-python.sh $PYTHONVERSION $PYTHONDIR $VENV source $VENV/bin/activate - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 - name: Compiled with mypyc if: ${{ matrix.test_mypyc }} run: | pip install -r test-requirements.txt - CC=clang MYPYC_OPT_LEVEL=0 python3 setup.py --use-mypyc build_ext --inplace + CC=clang MYPYC_OPT_LEVEL=0 MYPY_USE_MYPYC=1 pip install -e . - name: Setup tox environment - run: tox -e ${{ matrix.toxenv }} --notest + run: tox run -e ${{ matrix.toxenv }} --notest - name: Test - run: tox -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} + run: tox run -e ${{ matrix.toxenv }} --skip-pkg-install -- ${{ matrix.tox_extra_args }} python-nightly: runs-on: ubuntu-latest name: Test suite with Python nightly steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: '3.11-dev' - - name: Install tox - run: pip install --upgrade 'setuptools!=50' tox==3.24.5 - - name: Setup tox environment - run: tox -e py --notest - - name: Test - run: tox -e py --skip-pkg-install -- "-n 2" - continue-on-error: true - - name: Mark as a success - run: exit 0 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.12-dev' + - name: Install tox + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + - name: Setup tox environment + run: tox run -e py --notest + - name: Test + run: tox run -e py --skip-pkg-install -- "-n 2" + continue-on-error: true + - name: Mark as a success + run: exit 0 + + python_32bits: + runs-on: ubuntu-latest + name: Test mypyc suite with 32-bit Python + env: + TOX_SKIP_MISSING_INTERPRETERS: False + # Rich (pip) + FORCE_COLOR: 1 + # Tox + PY_COLORS: 1 + # Mypy (see https://github.com/python/mypy/issues/7771) + TERM: xterm-color + MYPY_FORCE_COLOR: 1 + MYPY_FORCE_TERMINAL_WIDTH: 200 + # Pytest + PYTEST_ADDOPTS: --color=yes + CXX: i686-linux-gnu-g++ + CC: i686-linux-gnu-gcc + steps: + - uses: actions/checkout@v3 + - name: Install 32-bit build dependencies + run: | + sudo dpkg --add-architecture i386 && \ + sudo apt-get update && sudo apt-get install -y \ + zlib1g-dev:i386 \ + g++-i686-linux-gnu \ + gcc-i686-linux-gnu \ + libffi-dev:i386 \ + libssl-dev:i386 \ + libbz2-dev:i386 \ + libncurses-dev:i386 \ + libreadline-dev:i386 \ + libsqlite3-dev:i386 \ + liblzma-dev:i386 \ + uuid-dev:i386 + - name: Compile, install, and activate 32-bit Python + uses: gabrielfalcao/pyenv-action@v13 + with: + default: 3.11.1 + command: python -c "import platform; print(f'{platform.architecture()=} {platform.machine()=}');" + - name: Install tox + run: pip install --upgrade 'setuptools!=50' tox==4.4.4 + - name: Setup tox 
environment + run: tox run -e py --notest + - name: Test + run: tox run -e py --skip-pkg-install -- -n 2 mypyc/test/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 53a8c5b541db..0de686b7eb01 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,16 @@ repos: - repo: https://github.com/psf/black - rev: 22.6.0 # must match test-requirements.txt + rev: 22.12.0 # must match test-requirements.txt hooks: - id: black - repo: https://github.com/pycqa/isort - rev: 5.10.1 # must match test-requirements.txt + rev: 5.11.4 # must match test-requirements.txt hooks: - id: isort - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 # must match test-requirements.txt + rev: 5.0.4 # must match test-requirements.txt hooks: - id: flake8 additional_dependencies: - - flake8-bugbear==22.7.1 # must match test-requirements.txt - - flake8-noqa==1.2.8 # must match test-requirements.txt + - flake8-bugbear==22.12.6 # must match test-requirements.txt + - flake8-noqa==1.3.0 # must match test-requirements.txt diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c433eaee05b9..2b2e6cdb9734 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -53,11 +53,17 @@ python3 runtests.py You can also use `tox` to run tests (`tox` handles setting up the test environment for you): ``` -tox -e py +tox run -e py + +# Or some specific python version: +tox run -e py39 + +# Or some specific command: +tox run -e lint ``` Some useful commands for running specific tests include: -``` +```bash # Use mypy to check mypy's own code python3 runtests.py self # or equivalently: diff --git a/README.md b/README.md index 68e0975a791b..6c9f01968f92 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Mypy: Static Typing for Python [![Build Status](https://github.com/python/mypy/actions/workflows/test.yml/badge.svg)](https://github.com/python/mypy/actions) [![Documentation Status](https://readthedocs.org/projects/mypy/badge/?version=latest)](https://mypy.readthedocs.io/en/latest/?badge=latest) [![Chat at https://gitter.im/python/typing](https://badges.gitter.im/python/typing.svg)](https://gitter.im/python/typing?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) +[![Checked with mypy](https://www.mypy-lang.org/static/mypy_badge.svg)](https://mypy-lang.org/) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) @@ -58,10 +58,6 @@ Python is a dynamic language, so usually you'll only see errors in your code when you attempt to run it. Mypy is a *static* checker, so it finds bugs in your programs without even running them! -Mypy is designed with gradual typing in mind. This means you can add type -hints to your code base slowly and that you can always fall back to dynamic -typing when static typing is not convenient. - Here is a small example to whet your appetite: ```python @@ -69,12 +65,26 @@ number = input("What is your favourite number?") print("It is", number + 1) # error: Unsupported operand types for + ("str" and "int") ``` -See [the documentation](https://mypy.readthedocs.io/en/stable/index.html) for more examples. +Adding type hints for mypy does not interfere with the way your program would +otherwise run. Think of type hints as similar to comments! 
You can always use +the Python interpreter to run your code, even if mypy reports errors. + +Mypy is designed with gradual typing in mind. This means you can add type +hints to your code base slowly and that you can always fall back to dynamic +typing when static typing is not convenient. + +Mypy has a powerful and easy-to-use type system, supporting features such as +type inference, generics, callable types, tuple types, union types, +structural subtyping and more. Using mypy will make your programs easier to +understand, debug, and maintain. + +See [the documentation](https://mypy.readthedocs.io/en/stable/index.html) for +more examples and information. In particular, see: - [type hints cheat sheet](https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html) - [getting started](https://mypy.readthedocs.io/en/stable/getting_started.html) - +- [list of error codes](https://mypy.readthedocs.io/en/stable/error_code_list.html) Quick start ----------- @@ -100,9 +110,13 @@ programs, even if mypy reports type errors: python3 PROGRAM You can also try mypy in an [online playground](https://mypy-play.net/) (developed by -Yusuke Miyazaki). +Yusuke Miyazaki). If you are working with large code bases, you can run mypy in +[daemon mode], that will give much faster (often sub-second) incremental updates: + + dmypy run -- PROGRAM [statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing +[daemon mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html Integrations @@ -115,7 +129,7 @@ Mypy can be integrated into popular IDEs: `let g:syntastic_python_checkers=['mypy']` * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` -* Emacs: using [Flycheck](https://github.com/flycheck/) and [Flycheck-mypy](https://github.com/lbolla/emacs-flycheck-mypy) +* Emacs: using [Flycheck](https://github.com/flycheck/) * Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) * Atom: [linter-mypy](https://atom.io/packages/linter-mypy) * PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates @@ -151,14 +165,7 @@ To get started with developing mypy, see [CONTRIBUTING.md](CONTRIBUTING.md). If you need help getting started, don't hesitate to ask on [gitter](https://gitter.im/python/typing). -Development status ------------------- - -Mypy is beta software, but it has already been used in production -for several years at Dropbox and in many other organizations, and -it has an extensive test suite. 
- -mypyc and compiled version of mypy +Mypyc and compiled version of mypy ---------------------------------- [Mypyc](https://github.com/mypyc/mypyc) uses Python type hints to compile Python diff --git a/build-requirements.txt b/build-requirements.txt index dabc9b14c493..52c518d53bc2 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -1,3 +1,5 @@ +# NOTE: this needs to be kept in sync with the "requires" list in pyproject.toml -r mypy-requirements.txt +types-psutil types-setuptools -types-typed-ast>=1.5.0,<1.6.0 +types-typed-ast>=1.5.8,<1.6.0 diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst index 19e0d4dcce01..ef5bf9e8936d 100644 --- a/docs/source/additional_features.rst +++ b/docs/source/additional_features.rst @@ -177,7 +177,7 @@ Caveats/Known Issues will complain about not understanding the argument and the type annotation in :py:meth:`__init__ ` will be replaced by ``Any``. -* :ref:`Validator decorators ` +* :ref:`Validator decorators ` and `default decorators `_ are not type-checked against the attribute they are setting/validating. diff --git a/docs/source/builtin_types.rst b/docs/source/builtin_types.rst index 7ff9bd3c38e9..37b56169d879 100644 --- a/docs/source/builtin_types.rst +++ b/docs/source/builtin_types.rst @@ -15,8 +15,8 @@ Type Description ``int`` integer ``float`` floating point number ``bool`` boolean value (subclass of ``int``) -``str`` string (unicode in Python 3) -``bytes`` 8-bit string +``str`` text, sequence of unicode codepoints +``bytes`` 8-bit string, sequence of byte values ``object`` an arbitrary object (``object`` is the common base class) ====================== =============================== diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 29a25f38eac2..5aa1770512b8 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -1,34 +1,27 @@ .. _cheat-sheet-py3: -Type hints cheat sheet (Python 3) -================================= - -This document is a quick cheat sheet showing how the :pep:`484` type -annotation notation represents various common types in Python 3. - -.. note:: - - Technically many of the type annotations shown below are redundant, - because mypy can derive them from the type of the expression. So - many of the examples have a dual purpose: show how to write the - annotation, and show the inferred types. +Type hints cheat sheet +====================== +This document is a quick cheat sheet showing how to use type +annotations for various common types in Python. Variables ********* -Python 3.6 introduced a syntax for annotating variables in :pep:`526` -and we use it in most examples. +Technically many of the type annotations shown below are redundant, +since mypy can usually infer the type of a variable from its value. +See :ref:`type-inference-and-annotations` for more details. .. code-block:: python - # This is how you declare the type of a variable type in Python 3.6 + # This is how you declare the type of a variable age: int = 1 # You don't need to initialize a variable to annotate it a: int # Ok (no value at runtime until assigned) - # The latter is useful in conditional branches + # Doing so is useful in conditional branches child: bool if age < 18: child = True @@ -36,45 +29,52 @@ and we use it in most examples. child = False -Built-in types -************** +Useful built-in types +********************* .. 
code-block:: python - - from typing import List, Set, Dict, Tuple, Optional - - # For simple built-in types, just use the name of the type + # For most types, just use the name of the type. + # Note that mypy can usually infer the type of a variable from its value, + # so technically these annotations are redundant x: int = 1 x: float = 1.0 x: bool = True x: str = "test" x: bytes = b"test" - # For collections, the type of the collection item is in brackets - # (Python 3.9+) + # For collections on Python 3.9+, the type of the collection item is in brackets x: list[int] = [1] x: set[int] = {6, 7} - # In Python 3.8 and earlier, the name of the collection type is - # capitalized, and the type is imported from the 'typing' module - x: List[int] = [1] - x: Set[int] = {6, 7} - # For mappings, we need the types of both keys and values x: dict[str, float] = {"field": 2.0} # Python 3.9+ - x: Dict[str, float] = {"field": 2.0} # For tuples of fixed size, we specify the types of all the elements x: tuple[int, str, float] = (3, "yes", 7.5) # Python 3.9+ - x: Tuple[int, str, float] = (3, "yes", 7.5) # For tuples of variable size, we use one type and ellipsis x: tuple[int, ...] = (1, 2, 3) # Python 3.9+ + + # On Python 3.8 and earlier, the name of the collection type is + # capitalized, and the type is imported from the 'typing' module + from typing import List, Set, Dict, Tuple + x: List[int] = [1] + x: Set[int] = {6, 7} + x: Dict[str, float] = {"field": 2.0} + x: Tuple[int, str, float] = (3, "yes", 7.5) x: Tuple[int, ...] = (1, 2, 3) - # Use Optional[] for values that could be None - x: Optional[str] = some_function() + from typing import Union, Optional + + # On Python 3.10+, use the | operator when something could be one of a few types + x: list[int | str] = [3, 5, "test", "fun"] # Python 3.10+ + # On earlier versions, use Union + x: list[Union[int, str]] = [3, 5, "test", "fun"] + + # Use Optional[X] for a value that could be None + # Optional[X] is the same as X | None or Union[X, None] + x: Optional[str] = "something" if some_condition() else None # Mypy understands a value can't be None in an if-statement if x is not None: print(x.upper()) @@ -85,8 +85,6 @@ Built-in types Functions ********* -Python 3 supports an annotation syntax for function declarations. - .. code-block:: python from typing import Callable, Iterator, Union, Optional @@ -99,16 +97,23 @@ Python 3 supports an annotation syntax for function declarations. def plus(num1: int, num2: int) -> int: return num1 + num2 - # Add default value for an argument after the type annotation - def f(num1: int, my_float: float = 3.5) -> float: - return num1 + my_float + # If a function does not return a value, use None as the return type + # Default value for an argument goes after the type annotation + def show(value: str, excitement: int = 10) -> None: + print(value + "!" * excitement) + + # Note that arguments without a type are dynamically typed (treated as Any) + # and that functions without any annotations are not checked + def untyped(x): + x.anything() + 1 + "string" # no errors # This is how you annotate a callable (function) value x: Callable[[int, float], float] = f + def register(callback: Callable[[str], int]) -> None: ... # A generator function that yields ints is secretly just a function that # returns an iterator of ints, so that's how we annotate it - def g(n: int) -> Iterator[int]: + def gen(n: int) -> Iterator[int]: i = 0 while i < n: yield i @@ -119,78 +124,140 @@
sender: str, cc: Optional[list[str]], bcc: Optional[list[str]], - subject='', + subject: str = '', body: Optional[list[str]] = None ) -> bool: ... - # An argument can be declared positional-only by giving it a name - # starting with two underscores: - def quux(__x: int) -> None: + # Mypy understands positional-only and keyword-only arguments + # Positional-only arguments can also be marked by using a name starting with + # two underscores + def quux(x: int, /, *, y: int) -> None: pass - quux(3) # Fine - quux(__x=3) # Error + quux(3, y=5) # Ok + quux(3, 5) # error: Too many positional arguments for "quux" + quux(x=3, y=5) # error: Unexpected keyword argument "x" for "quux" + + # This says each positional arg and each keyword arg is a "str" + def call(self, *args: str, **kwargs: str) -> str: + reveal_type(args) # Revealed type is "tuple[str, ...]" + reveal_type(kwargs) # Revealed type is "dict[str, str]" + request = make_request(*args, **kwargs) + return self.do_api_query(request) + +Classes +******* + +.. code-block:: python + + class BankAccount: + # The "__init__" method doesn't return anything, so it gets return + # type "None" just like any other method that doesn't return anything + def __init__(self, account_name: str, initial_balance: int = 0) -> None: + # mypy will infer the correct types for these instance variables + # based on the types of the parameters. + self.account_name = account_name + self.balance = initial_balance + + # For instance methods, omit type for "self" + def deposit(self, amount: int) -> None: + self.balance += amount + + def withdraw(self, amount: int) -> None: + self.balance -= amount + + # User-defined classes are valid as types in annotations + account: BankAccount = BankAccount("Alice", 400) + def transfer(src: BankAccount, dst: BankAccount, amount: int) -> None: + src.withdraw(amount) + dst.deposit(amount) + + # Functions that accept BankAccount also accept any subclass of BankAccount! + class AuditedBankAccount(BankAccount): + # You can optionally declare instance variables in the class body + audit_log: list[str] + # This is an instance variable with a default value + auditor_name: str = "The Spanish Inquisition" + + def __init__(self, account_name: str, initial_balance: int = 0) -> None: + super().__init__(account_name, initial_balance) + self.audit_log: list[str] = [] + + def deposit(self, amount: int) -> None: + self.audit_log.append(f"Deposited {amount}") + self.balance += amount + + def withdraw(self, amount: int) -> None: + self.audit_log.append(f"Withdrew {amount}") + self.balance -= amount + + audited = AuditedBankAccount("Bob", 300) + transfer(audited, account, 100) # type checks! + + # You can use the ClassVar annotation to declare a class variable + class Car: + seats: ClassVar[int] = 4 + passengers: ClassVar[list[str]] + + # If you want dynamic attributes on your class, have it + # override "__setattr__" or "__getattr__" + class A: + # This will allow assignment to any A.x, if x is the same type as "value" + # (use "value: Any" to allow arbitrary types) + def __setattr__(self, name: str, value: int) -> None: ... + + # This will allow access to any A.x, if x is compatible with the return type + def __getattr__(self, name: str) -> int: ... + + a.foo = 42 # Works + a.bar = 'Ex-parrot' # Fails type checking When you're puzzled or when things are complicated ************************************************** ..
code-block:: python - from typing import Union, Any, Optional, cast + from typing import Union, Any, Optional, TYPE_CHECKING, cast # To find out what type mypy infers for an expression anywhere in # your program, wrap it in reveal_type(). Mypy will print an error # message with the type; remove it again before running the code. - reveal_type(1) # -> Revealed type is "builtins.int" - - # Use Union when something could be one of a few types - x: list[Union[int, str]] = [3, 5, "test", "fun"] - - # Use Any if you don't know the type of something or it's too - # dynamic to write a type for - x: Any = mystery_function() + reveal_type(1) # Revealed type is "builtins.int" # If you initialize a variable with an empty container or "None" - # you may have to help mypy a bit by providing a type annotation + # you may have to help mypy a bit by providing an explicit type annotation x: list[str] = [] x: Optional[str] = None - # This makes each positional arg and each keyword arg a "str" - def call(self, *args: str, **kwargs: str) -> str: - request = make_request(*args, **kwargs) - return self.do_api_query(request) + # Use Any if you don't know the type of something or it's too + # dynamic to write a type for + x: Any = mystery_function() + # Mypy will let you do anything with x! + x.whatever() * x["you"] + x("want") - any(x) and all(x) is super # no errors # Use a "type: ignore" comment to suppress errors on a given line, # when your code confuses mypy or runs into an outright bug in mypy. - # Good practice is to comment every "ignore" with a bug link - # (in mypy, typeshed, or your own code) or an explanation of the issue. - x = confusing_function() # type: ignore # https://github.com/python/mypy/issues/1167 + # Good practice is to add a comment explaining the issue. + x = confusing_function() # type: ignore # confusing_function won't return None here because ... # "cast" is a helper function that lets you override the inferred # type of an expression. It's only for mypy -- there's no runtime check. a = [4] b = cast(list[int], a) # Passes fine - c = cast(list[str], a) # Passes fine (no runtime check) - reveal_type(c) # -> Revealed type is "builtins.list[builtins.str]" - print(c) # -> [4]; the object is not cast - - # If you want dynamic attributes on your class, have it override "__setattr__" - # or "__getattr__" in a stub or in your source code. - # - # "__setattr__" allows for dynamic assignment to names - # "__getattr__" allows for dynamic access to names - class A: - # This will allow assignment to any A.x, if x is the same type as "value" - # (use "value: Any" to allow arbitrary types) - def __setattr__(self, name: str, value: int) -> None: ... - - # This will allow access to any A.x, if x is compatible with the return type - def __getattr__(self, name: str) -> int: ... - - a.foo = 42 # Works - a.bar = 'Ex-parrot' # Fails type checking + c = cast(list[str], a) # Passes fine despite being a lie (no runtime check) + reveal_type(c) # Revealed type is "builtins.list[builtins.str]" + print(c) # Still prints [4] ... the object is not changed or casted at runtime + + # Use "TYPE_CHECKING" if you want to have code that mypy can see but will not + # be executed at runtime (or to have code that mypy can't see) + if TYPE_CHECKING: + import json + else: + import orjson as json # mypy is unaware of this +In some cases type annotations can cause issues at runtime, see +:ref:`runtime_troubles` for dealing with this. 
Standard "duck types" ********************* @@ -216,7 +283,7 @@ that are common in idiomatic Python are standardized. # Mapping describes a dict-like object (with "__getitem__") that we won't # mutate, and MutableMapping one (with "__setitem__") that we might def f(my_mapping: Mapping[int, str]) -> list[int]: - my_mapping[5] = 'maybe' # if we try this, mypy will throw an error... + my_mapping[5] = 'maybe' # mypy will complain about this line... return list(my_mapping.keys()) f({3: 'yes', 4: 'no'}) @@ -230,40 +297,6 @@ that are common in idiomatic Python are standardized. You can even make your own duck types using :ref:`protocol-types`. -Classes -******* - -.. code-block:: python - - class MyClass: - # You can optionally declare instance variables in the class body - attr: int - # This is an instance variable with a default value - charge_percent: int = 100 - - # The "__init__" method doesn't return anything, so it gets return - # type "None" just like any other method that doesn't return anything - def __init__(self) -> None: - ... - - # For instance methods, omit type for "self" - def my_method(self, num: int, str1: str) -> str: - return num * str1 - - # User-defined classes are valid as types in annotations - x: MyClass = MyClass() - - # You can use the ClassVar annotation to declare a class variable - class Car: - seats: ClassVar[int] = 4 - passengers: ClassVar[list[str]] - - # You can also declare the type of an attribute in "__init__" - class Box: - def __init__(self) -> None: - self.items: list[str] = [] - - Coroutines and asyncio ********************** @@ -288,11 +321,7 @@ Miscellaneous .. code-block:: python import sys - import re - from typing import Match, IO - - # "typing.Match" describes regex matches from the re module - x: Match[str] = re.match(r'[0-9]+', "15") + from typing import IO # Use IO[] for functions that should accept or return any # object that comes from an open() call (IO[] does not @@ -307,7 +336,7 @@ Miscellaneous # Forward references are useful if you want to reference a class before # it is defined - def f(foo: A) -> int: # This will fail + def f(foo: A) -> int: # This will fail at runtime with 'A' is not defined ... class A: diff --git a/docs/source/class_basics.rst b/docs/source/class_basics.rst index 1eaba59a10c2..1d4164192318 100644 --- a/docs/source/class_basics.rst +++ b/docs/source/class_basics.rst @@ -308,6 +308,26 @@ however: in this case, but any attempt to construct an instance will be flagged as an error. +Mypy allows you to omit the body for an abstract method, but if you do so, +it is unsafe to call such method via ``super()``. For example: + +.. code-block:: python + + from abc import abstractmethod + class Base: + @abstractmethod + def foo(self) -> int: pass + @abstractmethod + def bar(self) -> int: + return 0 + class Sub(Base): + def foo(self) -> int: + return super().foo() + 1 # error: Call to abstract method "foo" of "Base" + # with trivial body via super() is unsafe + @abstractmethod + def bar(self) -> int: + return super().bar() + 1 # This is OK however. + A class can inherit any number of classes, both abstract and concrete. 
As with normal overrides, a dynamically typed method can override or implement a statically typed method defined in any base diff --git a/docs/source/command_line.rst b/docs/source/command_line.rst index e2175a7f35d4..31d23db204eb 100644 --- a/docs/source/command_line.rst +++ b/docs/source/command_line.rst @@ -129,30 +129,12 @@ Import discovery The following flags customize how exactly mypy discovers and follows imports. -.. option:: --namespace-packages - - This flag enables import discovery to use namespace packages (see - :pep:`420`). In particular, this allows discovery of imported - packages that don't have an ``__init__.py`` (or ``__init__.pyi``) - file. - - Namespace packages are found (using the PEP 420 rules, which - prefers "classic" packages over namespace packages) along the - module search path -- this is primarily set from the source files - passed on the command line, the ``MYPYPATH`` environment variable, - and the :confval:`mypy_path` config option. - - This flag affects how mypy finds modules and packages explicitly passed on - the command line. It also affects how mypy determines fully qualified module - names for files passed on the command line. See :ref:`Mapping file paths to - modules ` for details. - .. option:: --explicit-package-bases This flag tells mypy that top-level packages will be based in either the current directory, or a member of the ``MYPYPATH`` environment variable or :confval:`mypy_path` config option. This option is only useful in - conjunction with :option:`--namespace-packages`. See :ref:`Mapping file + the absence of `__init__.py`. See :ref:`Mapping file paths to modules ` for details. .. option:: --ignore-missing-imports @@ -236,6 +218,18 @@ imports. setting the :option:`--fast-module-lookup` option. +.. option:: --no-namespace-packages + + This flag disables import discovery of namespace packages (see :pep:`420`). + In particular, this prevents discovery of packages that don't have an + ``__init__.py`` (or ``__init__.pyi``) file. + + This flag affects how mypy finds modules and packages explicitly passed on + the command line. It also affects how mypy determines fully qualified module + names for files passed on the command line. See :ref:`Mapping file paths to + modules ` for details. + + .. _platform-configuration: Platform configuration @@ -390,29 +384,23 @@ None and Optional handling The following flags adjust how mypy handles values of type ``None``. For more details, see :ref:`no_strict_optional`. -.. _no-implicit-optional: +.. _implicit-optional: -.. option:: --no-implicit-optional +.. option:: --implicit-optional - This flag causes mypy to stop treating arguments with a ``None`` + This flag causes mypy to treat arguments with a ``None`` default value as having an implicit :py:data:`~typing.Optional` type. - For example, by default mypy will assume that the ``x`` parameter - is of type ``Optional[int]`` in the code snippet below since - the default parameter is ``None``: + For example, if this flag is set, mypy would assume that the ``x`` + parameter is actually of type ``Optional[int]`` in the code snippet below + since the default parameter is ``None``: .. code-block:: python def foo(x: int = None) -> None: print(x) - If this flag is set, the above snippet will no longer type check: - we must now explicitly indicate that the type is ``Optional[int]``: - - .. code-block:: python - - def foo(x: Optional[int] = None) -> None: - print(x) + **Note:** This was disabled by default starting in mypy 0.980. ..
option:: --no-strict-optional @@ -460,9 +448,10 @@ potentially problematic or redundant in some way. are when: - The function has a ``None`` or ``Any`` return type - - The function has an empty body or a body that is just - ellipsis (``...``). Empty functions are often used for - abstract methods. + - The function has an empty body and is marked as an abstract method, + is in a protocol class, or is in a stub file + - The execution path can never return; for example, if an exception + is always raised Passing in :option:`--no-warn-no-return` will disable these error messages in all cases. @@ -703,9 +692,9 @@ in error messages. ``file:line:column:end_line:end_column``. This option implies ``--show-column-numbers``. -.. option:: --show-error-codes +.. option:: --hide-error-codes - This flag will add an error code ``[]`` to error messages. The error + This flag will hide the error code ``[]`` from error messages. By default, the error code is shown after each error message:: prog.py:1: error: "str" has no attribute "trim" [attr-defined] @@ -830,7 +819,8 @@ in developing or debugging mypy internals. submitting them upstream, but also allows you to use a forked version of typeshed. - Note that this doesn't affect third-party library stubs. + Note that this doesn't affect third-party library stubs. To test third-party stubs, + for example try ``MYPYPATH=stubs/six mypy ...``. .. _warn-incomplete-stub: diff --git a/docs/source/common_issues.rst b/docs/source/common_issues.rst index d2302469518d..afb8e7d3ffe1 100644 --- a/docs/source/common_issues.rst +++ b/docs/source/common_issues.rst @@ -9,15 +9,6 @@ doesn't work as expected. Statically typed code is often identical to normal Python code (except for type annotations), but sometimes you need to do things slightly differently. -Can't install mypy using pip ----------------------------- - -If installation fails, you've probably hit one of these issues: - -* Mypy needs Python 3.6 or later to run. -* You may have to run pip like this: - ``python3 -m pip install mypy``. - .. _annotations_needed: No errors reported for obviously wrong code @@ -26,7 +17,9 @@ No errors reported for obviously wrong code There are several common reasons why obviously wrong code is not flagged as an error. -**The function containing the error is not annotated.** Functions that +**The function containing the error is not annotated.** + +Functions that do not have any annotations (neither for any argument nor for the return type) are not type-checked, and even the most blatant type errors (e.g. ``2 + 'a'``) pass silently. The solution is to add @@ -52,7 +45,9 @@ once you add annotations: If you don't know what types to add, you can use ``Any``, but beware: -**One of the values involved has type 'Any'.** Extending the above +**One of the values involved has type 'Any'.** + +Extending the above example, if we were to leave out the annotation for ``a``, we'd get no error: @@ -68,49 +63,52 @@ The reason is that if the type of ``a`` is unknown, the type of If you're having trouble debugging such situations, :ref:`reveal_type() ` might come in handy. -Note that sometimes library stubs have imprecise type information, -e.g. the :py:func:`pow` builtin returns ``Any`` (see `typeshed issue 285 -`_ for the reason). +Note that sometimes library stubs with imprecise type information +can be a source of ``Any`` values. 
:py:meth:`__init__ ` **method has no annotated -arguments or return type annotation.** :py:meth:`__init__ ` -is considered fully-annotated **if at least one argument is annotated**, -while mypy will infer the return type as ``None``. -The implication is that, for a :py:meth:`__init__ ` method -that has no argument, you'll have to explicitly annotate the return type -as ``None`` to type-check this :py:meth:`__init__ ` method: +arguments and no return type annotation.** + +This is basically a combination of the two cases above, in that ``__init__`` +without annotations can cause ``Any`` types leak into instance variables: .. code-block:: python - def foo(s: str) -> str: - return s + class Bad: + def __init__(self): + self.value = "asdf" + 1 + "asdf" # No error! - class A(): - def __init__(self, value: str): # Return type inferred as None, considered as typed method + bad = Bad() + bad.value + 1 # No error! + reveal_type(bad) # Revealed type is "__main__.Bad" + reveal_type(bad.value) # Revealed type is "Any" + + class Good: + def __init__(self) -> None: # Explicitly return None self.value = value - foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" - - class B(): - def __init__(self): # No argument is annotated, considered as untyped method - foo(1) # No error! - - class C(): - def __init__(self) -> None: # Must specify return type to type-check - foo(1) # error: Argument 1 to "foo" has incompatible type "int"; expected "str" - -**Some imports may be silently ignored**. Another source of -unexpected ``Any`` values are the :option:`--ignore-missing-imports -` and :option:`--follow-imports=skip -` flags. When you use :option:`--ignore-missing-imports `, -any imported module that cannot be found is silently replaced with -``Any``. When using :option:`--follow-imports=skip ` the same is true for -modules for which a ``.py`` file is found but that are not specified -on the command line. (If a ``.pyi`` stub is found it is always -processed normally, regardless of the value of -:option:`--follow-imports `.) To help debug the former situation (no -module found at all) leave out :option:`--ignore-missing-imports `; to get -clarity about the latter use :option:`--follow-imports=error `. You can -read up about these and other useful flags in :ref:`command-line`. + + +**Some imports may be silently ignored**. + +A common source of unexpected ``Any`` values is the +:option:`--ignore-missing-imports ` flag. + +When you use :option:`--ignore-missing-imports `, +any imported module that cannot be found is silently replaced with ``Any``. + +To help debug this, simply leave out +:option:`--ignore-missing-imports `. +As mentioned in :ref:`fix-missing-imports`, setting ``ignore_missing_imports=True`` +on a per-module basis will make bad surprises less likely and is highly encouraged. + +Use of the :option:`--follow-imports=skip ` flags can also +cause problems. Use of these flags is strongly discouraged and only required in +relatively niche situations. See :ref:`follow-imports` for more information. + +**mypy considers some of your code unreachable**. + +See :ref:`unreachable` for more information. **A function annotated as returning a non-optional type returns 'None' and mypy doesn't complain**. @@ -186,16 +184,16 @@ over ``.py`` files. Ignoring a whole file --------------------- -A ``# type: ignore`` comment at the top of a module (before any statements, -including imports or docstrings) has the effect of ignoring the *entire* module. - -.. 
code-block:: python +* To only ignore errors, use a top-level ``# mypy: ignore-errors`` comment instead. +* To only ignore errors with a specific error code, use a top-level + ``# mypy: disable-error-code=...`` comment. +* To replace the contents of a module with ``Any``, use a per-module ``follow_imports = skip``. + See :ref:`Following imports ` for details. - # type: ignore - - import foo - - foo.bar() +Note that a ``# type: ignore`` comment at the top of a module (before any statements, +including imports or docstrings) has the effect of ignoring the entire contents of the module. +This behaviour can be surprising and result in +"Module ... has no attribute ... [attr-defined]" errors. Issues with code at runtime --------------------------- @@ -254,20 +252,20 @@ Redefinitions with incompatible types Each name within a function only has a single 'declared' type. You can reuse for loop indices etc., but if you want to use a variable with -multiple types within a single function, you may need to declare it -with the ``Any`` type. +multiple types within a single function, you may need to instead use +multiple variables (or maybe declare the variable with an ``Any`` type). .. code-block:: python def f() -> None: n = 1 ... - n = 'x' # Type error: n has type int + n = 'x' # error: Incompatible types in assignment (expression has type "str", variable has type "int") .. note:: - This limitation could be lifted in a future mypy - release. + Using the :option:`--allow-redefinition ` + flag can suppress this error in several cases. Note that you can redefine a variable with a more *precise* or a more concrete type. For example, you can redefine a sequence (which does @@ -281,6 +279,8 @@ not support ``sort()``) as a list and sort it in-place: # Type of x is List[int] here. x.sort() # Okay! +See :ref:`type-narrowing` for more information. + .. _variance: Invariance vs covariance @@ -332,24 +332,24 @@ Declaring a supertype as variable type Sometimes the inferred type is a subtype (subclass) of the desired type. The type inference uses the first assignment to infer the type -of a name (assume here that ``Shape`` is the base class of both -``Circle`` and ``Triangle``): +of a name: .. code-block:: python - shape = Circle() # Infer shape to be Circle - ... - shape = Triangle() # Type error: Triangle is not a Circle + class Shape: ... + class Circle(Shape): ... + class Triangle(Shape): ... + + shape = Circle() # mypy infers the type of shape to be Circle + shape = Triangle() # error: Incompatible types in assignment (expression has type "Triangle", variable has type "Circle") You can just give an explicit type for the variable in cases such the above example: .. code-block:: python - shape = Circle() # type: Shape # The variable s can be any Shape, - # not just Circle - ... - shape = Triangle() # OK + shape: Shape = Circle() # The variable s can be any Shape, not just Circle + shape = Triangle() # OK Complex type tests ------------------ @@ -614,7 +614,10 @@ You can install the latest development version of mypy from source. Clone the git clone https://github.com/python/mypy.git cd mypy - sudo python3 -m pip install --upgrade . + python3 -m pip install --upgrade . + +To install a development version of mypy that is mypyc-compiled, see the +instructions at the `mypyc wheels repo `_. 
Variables vs type aliases ------------------------- diff --git a/docs/source/config_file.rst b/docs/source/config_file.rst index 663a0d2229a6..3b96e6bd7a5a 100644 --- a/docs/source/config_file.rst +++ b/docs/source/config_file.rst @@ -191,6 +191,28 @@ section of the command line docs. This option may only be set in the global section (``[mypy]``). +.. confval:: modules + + :type: comma-separated list of strings + + A comma-separated list of packages which should be checked by mypy if none are given on the command + line. Mypy *will not* recursively type check any submodules of the provided + module. + + This option may only be set in the global section (``[mypy]``). + + +.. confval:: packages + + :type: comma-separated list of strings + + A comma-separated list of packages which should be checked by mypy if none are given on the command + line. Mypy *will* recursively type check any submodules of the provided + package. This flag is identical to :confval:`modules` apart from this + behavior. + + This option may only be set in the global section (``[mypy]``). + .. confval:: exclude :type: regular expression @@ -254,10 +276,11 @@ section of the command line docs. .. confval:: namespace_packages :type: boolean - :default: False + :default: True Enables :pep:`420` style namespace packages. See the - corresponding flag :option:`--namespace-packages ` for more information. + corresponding flag :option:`--no-namespace-packages ` + for more information. This option may only be set in the global section (``[mypy]``). @@ -269,7 +292,7 @@ section of the command line docs. This flag tells mypy that top-level packages will be based in either the current directory, or a member of the ``MYPYPATH`` environment variable or :confval:`mypy_path` config option. This option is only useful in - conjunction with :confval:`namespace_packages`. See :ref:`Mapping file + the absence of `__init__.py`. See :ref:`Mapping file paths to modules ` for details. This option may only be set in the global section (``[mypy]``). @@ -503,13 +526,15 @@ None and Optional handling For more information, see the :ref:`None and Optional handling ` section of the command line docs. -.. confval:: no_implicit_optional +.. confval:: implicit_optional :type: boolean :default: False - Changes the treatment of arguments with a default value of ``None`` by not implicitly - making their type :py:data:`~typing.Optional`. + Causes mypy to treat arguments with a ``None`` + default value as having an implicit :py:data:`~typing.Optional` type. + + **Note:** This was True by default in mypy versions 0.980 and earlier. .. confval:: strict_optional @@ -574,14 +599,6 @@ Suppressing errors Note: these configuration options are available in the config file only. There is no analog available via the command line options. -.. confval:: show_none_errors - - :type: boolean - :default: True - - Shows errors related to strict ``None`` checking, if the global :confval:`strict_optional` - flag is enabled. - .. confval:: ignore_errors :type: boolean @@ -722,12 +739,12 @@ These options may only be set in the global section (``[mypy]``). Shows column numbers in error messages. -.. confval:: show_error_codes +.. confval:: hide_error_codes :type: boolean :default: False - Shows error codes in error messages. See :ref:`error-codes` for more information. + Hides error codes in error messages. See :ref:`error-codes` for more information. .. confval:: pretty @@ -858,9 +875,16 @@ These options may only be set in the global section (``[mypy]``). 
:type: string - Specifies an alternative directory to look for stubs instead of the - default ``typeshed`` directory. User home directory and environment - variables will be expanded. + This specifies the directory where mypy looks for standard library typeshed + stubs, instead of the typeshed that ships with mypy. This is + primarily intended to make it easier to test typeshed changes before + submitting them upstream, but also allows you to use a forked version of + typeshed. + + User home directory and environment variables will be expanded. + + Note that this doesn't affect third-party library stubs. To test third-party stubs, + for example try ``MYPYPATH=stubs/six mypy ...``. .. confval:: warn_incomplete_stub @@ -877,6 +901,12 @@ Report generation If these options are set, mypy will generate a report in the specified format into the specified directory. +.. warning:: + + Generating reports disables incremental mode and can significantly slow down + your workflow. It is recommended to enable reporting only for specific runs + (e.g. in CI). + .. confval:: any_exprs_report :type: string diff --git a/docs/source/dynamic_typing.rst b/docs/source/dynamic_typing.rst index 390bc52d9e2c..d3476de2ca64 100644 --- a/docs/source/dynamic_typing.rst +++ b/docs/source/dynamic_typing.rst @@ -4,27 +4,39 @@ Dynamically typed code ====================== -As mentioned earlier, bodies of functions that don't have any explicit -types in their function annotation are dynamically typed (operations -are checked at runtime). Code outside functions is statically typed by -default, and types of variables are inferred. This does usually the -right thing, but you can also make any variable dynamically typed by -defining it explicitly with the type ``Any``: +In :ref:`getting-started-dynamic-vs-static`, we discussed how bodies of functions +that don't have any explicit type annotations in their function are "dynamically typed" +and that mypy will not check them. In this section, we'll talk a little bit more +about what that means and how you can enable dynamic typing on a more fine grained basis. + +In cases where your code is too magical for mypy to understand, you can make a +variable or parameter dynamically typed by explicitly giving it the type +``Any``. Mypy will let you do basically anything with a value of type ``Any``, +including assigning a value of type ``Any`` to a variable of any type (or vice +versa). .. code-block:: python from typing import Any - s = 1 # Statically typed (type int) - d: Any = 1 # Dynamically typed (type Any) - s = 'x' # Type check error - d = 'x' # OK + num = 1 # Statically typed (inferred to be int) + num = 'x' # error: Incompatible types in assignment (expression has type "str", variable has type "int") + + dyn: Any = 1 # Dynamically typed (type Any) + dyn = 'x' # OK + + num = dyn # No error, mypy will let you assign a value of type Any to any variable + num += 1 # Oops, mypy still thinks num is an int + +You can think of ``Any`` as a way to locally disable type checking. +See :ref:`silencing-type-errors` for other ways you can shut up +the type checker. Operations on Any values ------------------------ -You can do anything using a value with type ``Any``, and type checker -does not complain: +You can do anything using a value with type ``Any``, and the type checker +will not complain: .. 
code-block:: python @@ -37,7 +49,7 @@ does not complain: open(x).read() return x -Values derived from an ``Any`` value also often have the type ``Any`` +Values derived from an ``Any`` value also usually have the type ``Any`` implicitly, as mypy can't infer a more precise result type. For example, if you get the attribute of an ``Any`` value or call a ``Any`` value the result is ``Any``: @@ -45,12 +57,45 @@ example, if you get the attribute of an ``Any`` value or call a .. code-block:: python def f(x: Any) -> None: - y = x.foo() # y has type Any - y.bar() # Okay as well! + y = x.foo() + reveal_type(y) # Revealed type is "Any" + z = y.bar("mypy will let you do anything to y") + reveal_type(z) # Revealed type is "Any" ``Any`` types may propagate through your program, making type checking less effective, unless you are careful. +Function parameters without annotations are also implicitly ``Any``: + +.. code-block:: python + + def f(x) -> None: + reveal_type(x) # Revealed type is "Any" + x.can.do["anything", x]("wants", 2) + +You can make mypy warn you about untyped function parameters using the +:option:`--disallow-untyped-defs ` flag. + +Generic types missing type parameters will have those parameters implicitly +treated as ``Any``: + +.. code-block:: python + + from typing import List + + def f(x: List) -> None: + reveal_type(x) # Revealed type is "builtins.list[Any]" + reveal_type(x[0]) # Revealed type is "Any" + x[0].anything_goes() # OK + +You can make mypy warn you about these missing type parameters using the +:option:`--disallow-any-generics ` flag. + +Finally, another major source of ``Any`` types leaking into your program is from +third party libraries that mypy does not know about. This is particularly the case +when using the :option:`--ignore-missing-imports ` +flag. See :ref:`fix-missing-imports` for more information about this. + Any vs. object -------------- @@ -80,6 +125,11 @@ operations: n: int = 1 n = o # Error! + +If you're not sure whether you need to use :py:class:`object` or ``Any``, use +:py:class:`object` -- only switch to using ``Any`` if you get a type checker +complaint. + You can use different :ref:`type narrowing ` techniques to narrow :py:class:`object` to a more specific type (subtype) such as ``int``. Type narrowing is not needed with diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 5c1f0bedb980..0388cd2165dd 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -89,6 +89,23 @@ This example accidentally calls ``sort()`` instead of :py:func:`sorted`: x = sort([3, 2, 4]) # Error: Name "sort" is not defined [name-defined] + +Check that a variable is not used before it's defined [used-before-def] ----------------------------------------------------------------------- +Mypy will generate an error if a name is used before it's defined. +While the name-defined check will catch issues with names that are undefined, +it will not flag if a variable is used and then defined later in the scope. +The used-before-def check will catch such cases. + +Example: + +.. 
code-block:: python + + print(x) # Error: Name "x" is used before definition [used-before-def] + x = 123 + + Check arguments in calls [call-arg] ----------------------------------- @@ -322,6 +339,35 @@ Example: # variable has type "str") [assignment] r.name = 5 +Check that assignment target is not a method [method-assign] +------------------------------------------------------------ + +In general, assigning to a method on class object or instance (a.k.a. +monkey-patching) is ambiguous in terms of types, since Python's static type +system cannot express difference between bound and unbound callable types. +Consider this example: + +.. code-block:: python + + class A: + def f(self) -> None: pass + def g(self) -> None: pass + + def h(self: A) -> None: pass + + A.f = h # type of h is Callable[[A], None] + A().f() # this works + A.f = A().g # type of A().g is Callable[[], None] + A().f() # but this also works at runtime + +To prevent the ambiguity, mypy will flag both assignments by default. If this +error code is disabled, mypy will treat all method assignments r.h.s. as unbound, +so the second assignment will still generate an error. + +.. note:: + + This error code is a sub-error code of a wider ``[assignment]`` code. + Check type variable values [type-var] ------------------------------------- @@ -431,6 +477,59 @@ Example: # TypedDict item "x" has type "int") [typeddict-item] p: Point = {'x': 1.2, 'y': 4} +Check TypedDict Keys [typeddict-unknown-key] +-------------------------------------------- + +When constructing a ``TypedDict`` object, mypy checks whether the definition +contains unknown keys. For convenience's sake, mypy will not generate an error +when a ``TypedDict`` has extra keys if it's passed to a function as an argument. +However, it will generate an error when these are created. Example: + +.. code-block:: python + + from typing_extensions import TypedDict + + class Point(TypedDict): + x: int + y: int + + class Point3D(Point): + z: int + + def add_x_coordinates(a: Point, b: Point) -> int: + return a["x"] + b["x"] + + a: Point = {"x": 1, "y": 4} + b: Point3D = {"x": 2, "y": 5, "z": 6} + + # OK + add_x_coordinates(a, b) + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + add_x_coordinates(a, {"x": 1, "y": 4, "z": 5}) + + +Setting an unknown value on a ``TypedDict`` will also generate this error: + +.. code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] + a["z"] = 3 + + +Whereas reading an unknown value will generate the more generic/serious +``typeddict-item``: + +.. code-block:: python + + a: Point = {"x": 1, "y": 2} + # Error: TypedDict "Point" has no key "z" [typeddict-item] + _ = a["z"] + +.. note:: + + This error code is a sub-error code of a wider ``[typeddict-item]`` code. + Check that type of target is known [has-type] --------------------------------------------- @@ -564,6 +663,54 @@ Example: # Error: Cannot instantiate abstract class "Thing" with abstract attribute "save" [abstract] t = Thing() +Safe handling of abstract type object types [type-abstract] +----------------------------------------------------------- + +Mypy always allows instantiating (calling) type objects typed as ``Type[t]``, +even if it is not known that ``t`` is non-abstract, since it is a common +pattern to create functions that act as object factories (custom constructors). 
+Therefore, to prevent issues described in the above section, when an abstract +type object is passed where ``Type[t]`` is expected, mypy will give an error. +Example: + +.. code-block:: python + + from abc import ABCMeta, abstractmethod + from typing import List, Type, TypeVar + + class Config(metaclass=ABCMeta): + @abstractmethod + def get_value(self, attr: str) -> str: ... + + T = TypeVar("T") + def make_many(typ: Type[T], n: int) -> List[T]: + return [typ() for _ in range(n)] # This will raise if typ is abstract + + # Error: Only concrete class can be given where "Type[Config]" is expected [type-abstract] + make_many(Config, 5) + +Check that call to an abstract method via super is valid [safe-super] +--------------------------------------------------------------------- + +Abstract methods often don't have any default implementation, i.e. their +bodies are just empty. Calling such methods in subclasses via ``super()`` +will cause runtime errors, so mypy prevents you from doing so: + +.. code-block:: python + + from abc import abstractmethod + class Base: + @abstractmethod + def foo(self) -> int: ... + class Sub(Base): + def foo(self) -> int: + return super().foo() + 1 # error: Call to abstract method "foo" of "Base" with + # trivial body via super() is unsafe [safe-super] + Sub().foo() # This will crash at runtime. + +Mypy considers the following as trivial bodies: a ``pass`` statement, a literal +ellipsis ``...``, a docstring, and a ``raise NotImplementedError`` statement. + Check the target of NewType [valid-newtype] ------------------------------------------- @@ -657,6 +804,35 @@ consistently when using the call-based syntax. Example: # Error: First argument to namedtuple() should be "Point2D", not "Point" Point2D = NamedTuple("Point", [("x", int), ("y", int)]) +Check that literal is used where expected [literal-required] +------------------------------------------------------------ + +There are some places where only a (string) literal value is expected for +the purposes of static type checking, for example a ``TypedDict`` key, or +a ``__match_args__`` item. Providing a ``str``-valued variable in such contexts +will result in an error. Note however, in many cases you can use ``Final``, +or ``Literal`` variables, for example: + +.. code-block:: python + + from typing import Final, Literal, TypedDict + + class Point(TypedDict): + x: int + y: int + + def test(p: Point) -> None: + X: Final = "x" + p[X] # OK + + Y: Literal["y"] = "y" + p[Y] # OK + + key = "x" # Inferred type of key is `str` + # Error: TypedDict key must be a string literal; + # expected one of ("x", "y") [literal-required] + p[key] + Check that overloaded functions have an implementation [no-overload-impl] ------------------------------------------------------------------------- @@ -716,6 +892,19 @@ the provided type. assert_type([1], list[str]) # Error +Check that function isn't used in boolean context [truthy-function] +------------------------------------------------------------------- + +Functions will always evaluate to true in boolean contexts. + +.. code-block:: python + + def f(): + ... 
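+    # Note: the error below is about the function object "f" itself always being
+    # truthy in a condition; calling the function, as in "if f():", is checked as usual.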
+ + if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + pass + Report syntax errors [syntax] ----------------------------- diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index 3938669edafc..f160515f0a9e 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -82,6 +82,28 @@ Example: # Error: Redundant cast to "int" [redundant-cast] return cast(int, x) +Check that methods do not have redundant Self annotations [redundant-self] +-------------------------------------------------------------------------- + +If a method uses the ``Self`` type in the return type or the type of a +non-self argument, there is no need to annotate the ``self`` argument +explicitly. Such annotations are allowed by :pep:`673` but are +redundant. If you enable this error code, mypy will generate an error if +there is a redundant ``Self`` type. + +Example: + +.. code-block:: python + + # mypy: enable-error-code="redundant-self" + + from typing import Self + + class C: + # Error: Redundant "Self" annotation for the first method argument + def copy(self: Self) -> Self: + return type(self)() + Check that comparisons are overlapping [comparison-overlap] ----------------------------------------------------------- @@ -217,45 +239,52 @@ mypy generates an error if it thinks that an expression is redundant. Check that expression is not implicitly true in boolean context [truthy-bool] ----------------------------------------------------------------------------- -Warn when an expression whose type does not implement ``__bool__`` or ``__len__`` is used in boolean context, -since unless implemented by a sub-type, the expression will always evaluate to true. +Warn when the type of an expression in a boolean context does not +implement ``__bool__`` or ``__len__``. Unless one of these is +implemented by a subtype, the expression will always be considered +true, and there may be a bug in the condition. + +As an exception, the ``object`` type is allowed in a boolean context. +Using an iterable value in a boolean context has a separate error code +(see below). .. code-block:: python # Use "mypy --enable-error-code truthy-bool ..." class Foo: - pass + pass foo = Foo() # Error: "foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context if foo: - ... + ... -This check might falsely imply an error. For example, ``Iterable`` does not implement -``__len__`` and so this code will be flagged: +Check that iterable is not implicitly true in boolean context [truthy-iterable] +------------------------------------------------------------------------------- -.. code-block:: python +Generate an error if a value of type ``Iterable`` is used as a boolean +condition, since ``Iterable`` does not implement ``__len__`` or ``__bool__``. + +Example: - # Use "mypy -enable-error-code truthy-bool ..." +.. code-block:: python from typing import Iterable - def transform(items: Iterable[int]) -> Iterable[int]: - # Error: "items" has type "Iterable[int]" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] + def transform(items: Iterable[int]) -> list[int]: + # Error: "items" has type "Iterable[int]" which can always be true in boolean context. Consider using "Collection[int]" instead. 
[truthy-iterable]
         if not items:
             return [42]
         return [x + 1 for x in items]
-
-
-If called as ``transform((int(s) for s in []))``, this function would not return ``[42]`` unlike what the author
-might have intended. Of course it's possible that ``transform`` is only passed ``list`` objects, and so there is
-no error in practice. In such case, it might be prudent to annotate ``items: Sequence[int]``.
-
-This is similar in concept to ensuring that an expression's type implements an expected interface (e.g. ``Sized``),
-except that attempting to invoke an undefined method (e.g. ``__len__``) results in an error,
-while attempting to evaluate an object in boolean context without a concrete implementation results in a truthy value.
+If ``transform`` is called with a ``Generator`` argument, such as
+``(int(x) for x in [])``, this function would not return ``[42]``, unlike
+what might be intended. Of course, it's possible that ``transform`` is
+only called with ``list`` or other container objects, and the ``if not
+items`` check is actually valid. If that is the case, it is
+recommended to annotate ``items`` as ``Collection[int]`` instead of
+``Iterable[int]``.

 .. _ignore-without-code:

diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst
index bed73abc379f..34bb8ab6b5e1 100644
--- a/docs/source/error_codes.rst
+++ b/docs/source/error_codes.rst
@@ -23,12 +23,12 @@ Error codes may change in future mypy releases.
 Displaying error codes
 ----------------------

-Error codes are not displayed by default. Use :option:`--show-error-codes <mypy --show-error-codes>`
-or config ``show_error_codes = True`` to display error codes. Error codes are shown inside square brackets:
+Error codes are displayed by default. Use :option:`--hide-error-codes <mypy --hide-error-codes>`
+or config ``hide_error_codes = True`` to hide error codes. Error codes are shown inside square brackets:

 .. code-block:: text

-    $ mypy --show-error-codes prog.py
+    $ mypy prog.py
     prog.py:1: error: "str" has no attribute "trim"  [attr-defined]

 It's also possible to require error codes for ``type: ignore`` comments.
@@ -69,3 +69,58 @@ which enables the ``no-untyped-def`` error code.
 You can use :option:`--enable-error-code <mypy --enable-error-code>` to enable specific error codes that don't have a dedicated command-line flag or config file setting.
+
+Per-module enabling/disabling error codes
+-----------------------------------------
+
+You can use :ref:`configuration file <config-file>` sections to enable or
+disable specific error codes only in some modules. For example, this ``mypy.ini``
+config will allow non-annotated empty containers in tests, while keeping
+other parts of code checked in strict mode:
+
+.. code-block:: ini
+
+    [mypy]
+    strict = True
+
+    [mypy-tests.*]
+    allow_untyped_defs = True
+    allow_untyped_calls = True
+    disable_error_code = var-annotated, has-type
+
+Note that per-module enabling/disabling acts as an override over the global
+options, so you don't need to repeat the error code lists for each
+module if you have them in the global config section. For example:
+
+.. code-block:: ini
+
+    [mypy]
+    enable_error_code = truthy-bool, ignore-without-code, unused-awaitable
+
+    [mypy-extensions.*]
+    disable_error_code = unused-awaitable
+
+The above config will allow unused awaitables in extension modules, but will
+still keep the other two error codes enabled.
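+For instance, a specific module matched by ``extensions.*`` could opt back in
+to the globally enabled check using an inline configuration comment, which is
+covered in the list below (a small illustrative sketch; the file name is
+hypothetical):
+
+.. code-block:: python
+
+    # extensions/critical.py
+    # mypy: enable-error-code="unused-awaitable"
+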
The overall logic is as follows:
+
+* Command line and/or config main section set global error codes
+
+* Individual config sections *adjust* them per glob/module
+
+* Inline ``# mypy: ...`` comments can further *adjust* them for a specific
+  module
+
+So one can, for example, enable an error code globally, disable it for all tests in
+the corresponding config section, and then re-enable it with an inline
+comment in some specific test.
+
+Sub-error codes of other error codes
+------------------------------------
+
+In rare cases (mostly for backwards compatibility reasons), some error
+code may be covered by another, wider error code. For example, an error with
+code ``[method-assign]`` can be ignored by ``# type: ignore[assignment]``.
+Similar logic works for disabling error codes globally. If a given error code
+is a sub code of another one, it must be mentioned in the docs for the narrower
+code. This hierarchy is not nested: there cannot be sub-error codes of other
+sub-error codes.

diff --git a/docs/source/existing_code.rst b/docs/source/existing_code.rst
index 66259e5e94c7..410d7af0c350 100644
--- a/docs/source/existing_code.rst
+++ b/docs/source/existing_code.rst
@@ -7,38 +7,78 @@ This section explains how to get started using mypy with an existing,
 significant codebase that has little or no type annotations. If you are a beginner, you can skip this section.

-These steps will get you started with mypy on an existing codebase:
+Start small
+-----------

-1. Start small -- get a clean mypy build for some files, with few
-   annotations
+If your codebase is large, pick a subset of your codebase (say, 5,000 to 50,000
+lines) and get mypy to run successfully only on this subset at first, *before adding annotations*. This should be doable in a day or two. The sooner you get
+some form of mypy passing on your codebase, the sooner you benefit.

-2. Write a mypy runner script to ensure consistent results
+You'll likely need to fix some mypy errors, either by inserting
+annotations requested by mypy or by adding ``# type: ignore``
+comments to silence errors you don't want to fix now.

-3. Run mypy in Continuous Integration to prevent type errors
+We'll mention some tips for getting mypy passing on your codebase in various
+sections below.

-4. Gradually annotate commonly imported modules
+Run mypy consistently and prevent regressions
+---------------------------------------------

-5. Write annotations as you modify existing code and write new code
+Make sure all developers on your codebase run mypy the same way.
+One way to ensure this is to add a small script with your mypy
+invocation to your codebase, or to add your mypy invocation to
+existing tools you use to run tests, like ``tox``.

-6. Use :doc:`monkeytype:index` or `PyAnnotate`_ to automatically annotate legacy code
+* Make sure everyone runs mypy with the same options. Checking a mypy
+  :ref:`configuration file <config-file>` into your codebase can help
+  with this.

-We discuss all of these points in some detail below, and a few optional
-follow-up steps.
+* Make sure everyone type checks the same set of files. See
+  :ref:`specifying-code-to-be-checked` for details.

-Start small
------------
+* Make sure everyone runs mypy with the same version of mypy, for instance
+  by pinning mypy with the rest of your dev requirements.

-If your codebase is large, pick a subset of your codebase (say, 5,000
-to 50,000 lines) and run mypy only on this subset at first,
-*without any annotations*.
This shouldn't take more than a day or two -to implement, so you start enjoying benefits soon. +In particular, you'll want to make sure to run mypy as part of your +Continuous Integration (CI) system as soon as possible. This will +prevent new type errors from being introduced into your codebase. -You'll likely need to fix some mypy errors, either by inserting -annotations requested by mypy or by adding ``# type: ignore`` -comments to silence errors you don't want to fix now. +A simple CI script could look something like this: + +.. code-block:: text + + python3 -m pip install mypy==0.971 + # Run your standardised mypy invocation, e.g. + mypy my_project + # This could also look like `scripts/run_mypy.sh`, `tox run -e mypy`, `make mypy`, etc + +Ignoring errors from certain modules +------------------------------------ -In particular, mypy often generates errors about modules that it can't -find or that don't have stub files: +By default mypy will follow imports in your code and try to check everything. +This means even if you only pass in a few files to mypy, it may still process a +large number of imported files. This could potentially result in lots of errors +you don't want to deal with at the moment. + +One way to deal with this is to ignore errors in modules you aren't yet ready to +type check. The :confval:`ignore_errors` option is useful for this, for instance, +if you aren't yet ready to deal with errors from ``package_to_fix_later``: + +.. code-block:: text + + [mypy-package_to_fix_later.*] + ignore_errors = True + +You could even invert this, by setting ``ignore_errors = True`` in your global +config section and only enabling error reporting with ``ignore_errors = False`` +for the set of modules you are ready to type check. + +Fixing errors related to imports +-------------------------------- + +A common class of error you will encounter is errors from mypy about modules +that it can't find, that don't have types, or don't have stub files: .. code-block:: text @@ -46,7 +86,15 @@ find or that don't have stub files: core/model.py:9: error: Cannot find implementation or library stub for module named 'acme' ... -This is normal, and you can easily ignore these errors. For example, +Sometimes these can be fixed by installing the relevant packages or +stub libraries in the environment you're running ``mypy`` in. + +See :ref:`ignore-missing-imports` for a complete reference on these errors +and the ways in which you can fix them. + +You'll likely find that you want to suppress all errors from importing +a given module that doesn't have types. If you only import that module +in one or two places, you can use ``# type: ignore`` comments. For example, here we ignore an error about a third-party module ``frobnicate`` that doesn't have stubs using ``# type: ignore``: @@ -56,9 +104,9 @@ doesn't have stubs using ``# type: ignore``: ... frobnicate.initialize() # OK (but not checked) -You can also use a mypy configuration file, which is convenient if -there are a large number of errors to ignore. For example, to disable -errors about importing ``frobnicate`` and ``acme`` everywhere in your +But if you import the module in many places, this becomes unwieldy. In this +case, we recommend using a :ref:`configuration file `. For example, +to disable errors about importing ``frobnicate`` and ``acme`` everywhere in your codebase, use a config like this: .. 
code-block:: text @@ -69,69 +117,33 @@ codebase, use a config like this: [mypy-acme.*] ignore_missing_imports = True -You can add multiple sections for different modules that should be -ignored. - -If your config file is named ``mypy.ini``, this is how you run mypy: - -.. code-block:: text - - mypy --config-file mypy.ini mycode/ - If you get a large number of errors, you may want to ignore all errors -about missing imports. This can easily cause problems later on and -hide real errors, and it's only recommended as a last resort. -For more details, look :ref:`here `. - -Mypy follows imports by default. This can result in a few files passed -on the command line causing mypy to process a large number of imported -files, resulting in lots of errors you don't want to deal with at the -moment. There is a config file option to disable this behavior, but -since this can hide errors, it's not recommended for most users. - -Mypy runner script ------------------- - -Introduce a mypy runner script that runs mypy, so that every developer -will use mypy consistently. Here are some things you may want to do in -the script: - -* Ensure that the correct version of mypy is installed. - -* Specify mypy config file or command-line options. - -* Provide set of files to type check. You may want to implement - inclusion and exclusion filters for full control of the file - list. - -Continuous Integration ----------------------- - -Once you have a clean mypy run and a runner script for a part -of your codebase, set up your Continuous Integration (CI) system to -run mypy to ensure that developers won't introduce bad annotations. -A simple CI script could look something like this: +about missing imports, for instance by setting :confval:`ignore_missing_imports` +to true globally. This can hide errors later on, so we recommend avoiding this +if possible. -.. code-block:: text +Finally, mypy allows fine-grained control over specific import following +behaviour. It's very easy to silently shoot yourself in the foot when playing +around with these, so it's mostly recommended as a last resort. For more +details, look :ref:`here `. - python3 -m pip install mypy==0.790 # Pinned version avoids surprises - scripts/mypy # Run the mypy runner script you set up - -Annotate widely imported modules --------------------------------- +Prioritise annotating widely imported modules +--------------------------------------------- Most projects have some widely imported modules, such as utilities or model classes. It's a good idea to annotate these pretty early on, since this allows code using these modules to be type checked more -effectively. Since mypy supports gradual typing, it's okay to leave -some of these modules unannotated. The more you annotate, the more -useful mypy will be, but even a little annotation coverage is useful. +effectively. + +Mypy is designed to support gradual typing, i.e. letting you add annotations at +your own pace, so it's okay to leave some of these modules unannotated. The more +you annotate, the more useful mypy will be, but even a little annotation +coverage is useful. Write annotations as you go --------------------------- -Now you are ready to include type annotations in your development -workflows. Consider adding something like these in your code style +Consider adding something like these in your code style conventions: 1. Developers should add annotations for any new code. @@ -143,9 +155,9 @@ codebase without much effort. 
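+
+To back these conventions with tooling, you can require annotations in just
+the parts of the codebase where new development happens, using a per-module
+config section (a sketch with a hypothetical package name; the
+:confval:`disallow_untyped_defs` option is discussed further below):
+
+.. code-block:: text
+
+    # New code lives under myproject.newthing, so require annotations there
+    [mypy-myproject.newthing.*]
+    disallow_untyped_defs = True
+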
Automate annotation of legacy code ---------------------------------- -There are tools for automatically adding draft annotations -based on type profiles collected at runtime. Tools include -:doc:`monkeytype:index` (Python 3) and `PyAnnotate`_. +There are tools for automatically adding draft annotations based on simple +static analysis or on type profiles collected at runtime. Tools include +:doc:`monkeytype:index`, `autotyping`_ and `PyAnnotate`_. A simple approach is to collect types from test runs. This may work well if your test coverage is good (and if your tests aren't very @@ -156,14 +168,7 @@ fraction of production network requests. This clearly requires more care, as type collection could impact the reliability or the performance of your service. -Speed up mypy runs ------------------- - -You can use :ref:`mypy daemon ` to get much faster -incremental mypy runs. The larger your project is, the more useful -this will be. If your project has at least 100,000 lines of code or -so, you may also want to set up :ref:`remote caching ` -for further speedups. +.. _getting-to-strict: Introduce stricter options -------------------------- @@ -172,7 +177,69 @@ Mypy is very configurable. Once you get started with static typing, you may want to explore the various strictness options mypy provides to catch more bugs. For example, you can ask mypy to require annotations for all functions in certain modules to avoid accidentally introducing code that won't be type checked using -:confval:`disallow_untyped_defs`, or type check code without annotations as well -with :confval:`check_untyped_defs`. Refer to :ref:`config-file` for the details. +:confval:`disallow_untyped_defs`. Refer to :ref:`config-file` for the details. + +An excellent goal to aim for is to have your codebase pass when run against ``mypy --strict``. +This basically ensures that you will never have a type related error without an explicit +circumvention somewhere (such as a ``# type: ignore`` comment). + +The following config is equivalent to ``--strict`` (as of mypy 0.990): + +.. code-block:: text + + # Start off with these + warn_unused_configs = True + warn_redundant_casts = True + warn_unused_ignores = True + no_implicit_optional = True + + # Getting these passing should be easy + strict_equality = True + strict_concatenate = True + + # Strongly recommend enabling this one as soon as you can + check_untyped_defs = True + + # These shouldn't be too much additional work, but may be tricky to + # get passing if you use a lot of untyped libraries + disallow_subclassing_any = True + disallow_untyped_decorators = True + disallow_any_generics = True + + # These next few are various gradations of forcing use of type annotations + disallow_untyped_calls = True + disallow_incomplete_defs = True + disallow_untyped_defs = True + + # This one isn't too hard to get passing, but return on investment is lower + no_implicit_reexport = True + + # This one can be tricky to get passing if you use a lot of untyped libraries + warn_return_any = True + +Note that you can also start with ``--strict`` and subtract, for instance: + +.. code-block:: text + + strict = True + warn_return_any = False + +Remember that many of these options can be enabled on a per-module basis. For instance, +you may want to enable ``disallow_untyped_defs`` for modules which you've completed +annotations for, in order to prevent new code from being added without annotations. + +And if you want, it doesn't stop at ``--strict``. 
Mypy has additional checks +that are not part of ``--strict`` that can be useful. See the complete +:ref:`command-line` reference and :ref:`error-codes-optional`. + +Speed up mypy runs +------------------ + +You can use :ref:`mypy daemon ` to get much faster +incremental mypy runs. The larger your project is, the more useful +this will be. If your project has at least 100,000 lines of code or +so, you may also want to set up :ref:`remote caching ` +for further speedups. .. _PyAnnotate: https://github.com/dropbox/pyannotate +.. _autotyping: https://github.com/JelleZijlstra/autotyping diff --git a/docs/source/extending_mypy.rst b/docs/source/extending_mypy.rst index 00c328be7728..daf863616334 100644 --- a/docs/source/extending_mypy.rst +++ b/docs/source/extending_mypy.rst @@ -155,23 +155,9 @@ When analyzing this code, mypy will call ``get_type_analyze_hook("lib.Vector")`` so the plugin can return some valid type for each variable. **get_function_hook()** is used to adjust the return type of a function call. -This is a good choice if the return type of some function depends on *values* -of some arguments that can't be expressed using literal types (for example -a function may return an ``int`` for positive arguments and a ``float`` for -negative arguments). This hook will be also called for instantiation of classes. -For example: - -.. code-block:: python - - from contextlib import contextmanager - from typing import TypeVar, Callable - - T = TypeVar('T') - - @contextmanager # built-in plugin can infer a precise type here - def stopwatch(timer: Callable[[], T]) -> Iterator[T]: - ... - yield timer() +This hook will be also called for instantiation of classes. +This is a good choice if the return type is too complex +to be expressed by regular python typing. **get_function_signature_hook** is used to adjust the signature of a function. @@ -251,31 +237,3 @@ mypy's cache for that module so that it can be rechecked. This hook should be used to report to mypy any relevant configuration data, so that mypy knows to recheck the module if the configuration changes. The hooks should return data encodable as JSON. - -Notes about the semantic analyzer -********************************* - -Mypy 0.710 introduced a new semantic analyzer, and the old semantic -analyzer was removed in mypy 0.730. Support for the new semantic analyzer -required some changes to existing plugins. Here is a short summary of the -most important changes: - -* The order of processing AST nodes is different. Code outside - functions is processed first, and functions and methods are - processed afterwards. - -* Each AST node can be processed multiple times to resolve forward - references. The same plugin hook may be called multiple times, so - they need to be idempotent. - -* The ``anal_type()`` API method returns ``None`` if some part of - the type is not available yet due to forward references, for example. - -* When looking up symbols, you may encounter *placeholder nodes* that - are used for names that haven't been fully processed yet. You'll - generally want to request another semantic analysis iteration by - *deferring* in that case. - -See the docstring at the top of -`mypy/plugin.py `_ -for more details. 
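+
+To make the ``get_function_hook()`` hook described above more concrete, here is
+a minimal illustrative sketch of a plugin that registers one. The
+``lib.make_id`` target is made up for this example, and the callback simply
+returns the type mypy already inferred; a real hook would typically build a
+more precise type from ``ctx.arg_types`` or ``ctx.args``:
+
+.. code-block:: python
+
+    from typing import Callable, Optional
+
+    from mypy.plugin import FunctionContext, Plugin
+    from mypy.types import Type
+
+    class MyPlugin(Plugin):
+        def get_function_hook(
+            self, fullname: str
+        ) -> Optional[Callable[[FunctionContext], Type]]:
+            # Only hook calls to the (hypothetical) lib.make_id function.
+            if fullname == "lib.make_id":
+                return make_id_callback
+            return None
+
+    def make_id_callback(ctx: FunctionContext) -> Type:
+        # Keep whatever return type mypy inferred; a real plugin would
+        # inspect ctx.args / ctx.arg_types and compute a more precise type.
+        return ctx.default_return_type
+
+    def plugin(version: str):
+        return MyPlugin
+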
diff --git a/docs/source/generics.rst b/docs/source/generics.rst index 3ae616f78691..9ac79f90121d 100644 --- a/docs/source/generics.rst +++ b/docs/source/generics.rst @@ -50,17 +50,9 @@ Using ``Stack`` is similar to built-in container types: stack = Stack[int]() stack.push(2) stack.pop() - stack.push('x') # Type error + stack.push('x') # error: Argument 1 to "push" of "Stack" has incompatible type "str"; expected "int" -Type inference works for user-defined generic types as well: - -.. code-block:: python - - def process(stack: Stack[int]) -> None: ... - - process(Stack()) # Argument has inferred type Stack[int] - -Construction of instances of generic types is also type checked: +Construction of instances of generic types is type checked: .. code-block:: python @@ -68,77 +60,17 @@ Construction of instances of generic types is also type checked: def __init__(self, content: T) -> None: self.content = content - Box(1) # OK, inferred type is Box[int] + Box(1) # OK, inferred type is Box[int] Box[int](1) # Also OK - s = 'some string' - Box[int](s) # Type error - -Generic class internals -*********************** - -You may wonder what happens at runtime when you index -``Stack``. Indexing ``Stack`` returns a *generic alias* -to ``Stack`` that returns instances of the original class on -instantiation: - -.. code-block:: python - - >>> print(Stack) - __main__.Stack - >>> print(Stack[int]) - __main__.Stack[int] - >>> print(Stack[int]().__class__) - __main__.Stack - -Generic aliases can be instantiated or subclassed, similar to real -classes, but the above examples illustrate that type variables are -erased at runtime. Generic ``Stack`` instances are just ordinary -Python objects, and they have no extra runtime overhead or magic due -to being generic, other than a metaclass that overloads the indexing -operator. - -Note that in Python 3.8 and lower, the built-in types -:py:class:`list`, :py:class:`dict` and others do not support indexing. -This is why we have the aliases :py:class:`~typing.List`, -:py:class:`~typing.Dict` and so on in the :py:mod:`typing` -module. Indexing these aliases gives you a generic alias that -resembles generic aliases constructed by directly indexing the target -class in more recent versions of Python: - -.. code-block:: python - - >>> # Only relevant for Python 3.8 and below - >>> # For Python 3.9 onwards, prefer `list[int]` syntax - >>> from typing import List - >>> List[int] - typing.List[int] - -Note that the generic aliases in ``typing`` don't support constructing -instances: - -.. code-block:: python - - >>> from typing import List - >>> List[int]() - Traceback (most recent call last): - ... - TypeError: Type List cannot be instantiated; use list() instead - -.. note:: - - In Python 3.6 indexing generic types or type aliases results in actual - type objects. This means that generic types in type annotations can - have a significant runtime cost. This was changed in Python 3.7, and - indexing generic types became a cheap operation. + Box[int]('some string') # error: Argument 1 to "Box" has incompatible type "str"; expected "int" .. _generic-subclasses: -Defining sub-classes of generic classes -*************************************** +Defining subclasses of generic classes +************************************** User-defined generic classes and generic classes defined in :py:mod:`typing` -can be used as base classes for another classes, both generic and -non-generic. For example: +can be used as a base class for another class (generic or non-generic). For example: .. 
code-block:: python @@ -147,29 +79,29 @@ non-generic. For example: KT = TypeVar('KT') VT = TypeVar('VT') - class MyMap(Mapping[KT, VT]): # This is a generic subclass of Mapping - def __getitem__(self, k: KT) -> VT: - ... # Implementations omitted - def __iter__(self) -> Iterator[KT]: - ... - def __len__(self) -> int: - ... + # This is a generic subclass of Mapping + class MyMap(Mapping[KT, VT]): + def __getitem__(self, k: KT) -> VT: ... + def __iter__(self) -> Iterator[KT]: ... + def __len__(self) -> int: ... - items: MyMap[str, int] # Okay + items: MyMap[str, int] # OK - class StrDict(dict[str, str]): # This is a non-generic subclass of dict + # This is a non-generic subclass of dict + class StrDict(dict[str, str]): def __str__(self) -> str: return f'StrDict({super().__str__()})' + data: StrDict[int, int] # Error! StrDict is not generic data2: StrDict # OK + # This is a user-defined generic class class Receiver(Generic[T]): - def accept(self, value: T) -> None: - ... + def accept(self, value: T) -> None: ... - class AdvancedReceiver(Receiver[T]): - ... + # This is a generic subclass of Receiver + class AdvancedReceiver(Receiver[T]): ... .. note:: @@ -215,15 +147,16 @@ For example: Generic functions ***************** -Generic type variables can also be used to define generic functions: +Type variables can be used to define generic functions: .. code-block:: python from typing import TypeVar, Sequence - T = TypeVar('T') # Declare type variable + T = TypeVar('T') - def first(seq: Sequence[T]) -> T: # Generic function + # A generic function! + def first(seq: Sequence[T]) -> T: return seq[0] As with generic classes, the type variable can be replaced with any @@ -232,10 +165,8 @@ return type is derived from the sequence item type. For example: .. code-block:: python - # Assume first defined as above. - - s = first('foo') # s has type str. - n = first([1, 2, 3]) # n has type int. + reveal_type(first([1, 2, 3])) # Revealed type is "builtins.int" + reveal_type(first(['a', 'b'])) # Revealed type is "builtins.str" Note also that a single definition of a type variable (such as ``T`` above) can be used in multiple generic functions or classes. In this @@ -262,17 +193,11 @@ Generic methods and generic self ******************************** You can also define generic methods — just use a type variable in the -method signature that is different from class type variables. In particular, -``self`` may also be generic, allowing a method to return the most precise -type known at the point of access. - -.. note:: - - This feature is experimental. Checking code with type annotations for self - arguments is still not fully implemented. Mypy may disallow valid code or - allow unsafe code. - -In this way, for example, you can typecheck chaining of setter methods: +method signature that is different from class type variables. In +particular, the ``self`` argument may also be generic, allowing a +method to return the most precise type known at the point of access. +In this way, for example, you can type check a chain of setter +methods: .. code-block:: python @@ -298,7 +223,9 @@ In this way, for example, you can typecheck chaining of setter methods: circle: Circle = Circle().set_scale(0.5).set_radius(2.7) square: Square = Square().set_scale(0.5).set_width(3.2) -Without using generic ``self``, the last two lines could not be type-checked properly. 
+Without using generic ``self``, the last two lines could not be type +checked properly, since the return type of ``set_scale`` would be +``Shape``, which doesn't define ``set_radius`` or ``set_width``. Other uses are factory methods, such as copy and deserialization. For class methods, you can also define generic ``cls``, using :py:class:`Type[T] `: @@ -331,9 +258,74 @@ In the latter case, you must implement this method in all future subclasses. Note also that mypy cannot always verify that the implementation of a copy or a deserialization method returns the actual type of self. Therefore you may need to silence mypy inside these methods (but not at the call site), -possibly by making use of the ``Any`` type. +possibly by making use of the ``Any`` type or a ``# type: ignore`` comment. + +Note that mypy lets you use generic self types in certain unsafe ways +in order to support common idioms. For example, using a generic +self type in an argument type is accepted even though it's unsafe: + +.. code-block:: python + + from typing import TypeVar + + T = TypeVar("T") + + class Base: + def compare(self: T, other: T) -> bool: + return False + + class Sub(Base): + def __init__(self, x: int) -> None: + self.x = x + + # This is unsafe (see below) but allowed because it's + # a common pattern and rarely causes issues in practice. + def compare(self, other: Sub) -> bool: + return self.x > other.x -For some advanced uses of self-types see :ref:`additional examples `. + b: Base = Sub(42) + b.compare(Base()) # Runtime error here: 'Base' object has no attribute 'x' + +For some advanced uses of self types, see :ref:`additional examples `. + +Automatic self types using typing.Self +************************************** + +Since the patterns described above are quite common, mypy supports a +simpler syntax, introduced in :pep:`673`, to make them easier to use. +Instead of defining a type variable and using an explicit annotation +for ``self``, you can import the special type ``typing.Self`` that is +automatically transformed into a type variable with the current class +as the upper bound, and you don't need an annotation for ``self`` (or +``cls`` in class methods). The example from the previous section can +be made simpler by using ``Self``: + +.. code-block:: python + + from typing import Self + + class Friend: + other: Self | None = None + + @classmethod + def make_pair(cls) -> tuple[Self, Self]: + a, b = cls(), cls() + a.other = b + b.other = a + return a, b + + class SuperFriend(Friend): + pass + + a, b = SuperFriend.make_pair() + +This is more compact than using explicit type variables. Also, you can +use ``Self`` in attribute annotations in addition to methods. + +.. note:: + + To use this feature on Python versions earlier than 3.11, you will need to + import ``Self`` from ``typing_extensions`` (version 4.0 or newer). .. _variance-of-generics: @@ -345,51 +337,84 @@ relations between them: invariant, covariant, and contravariant. Assuming that we have a pair of types ``A`` and ``B``, and ``B`` is a subtype of ``A``, these are defined as follows: -* A generic class ``MyCovGen[T, ...]`` is called covariant in type variable - ``T`` if ``MyCovGen[B, ...]`` is always a subtype of ``MyCovGen[A, ...]``. -* A generic class ``MyContraGen[T, ...]`` is called contravariant in type - variable ``T`` if ``MyContraGen[A, ...]`` is always a subtype of - ``MyContraGen[B, ...]``. 
-* A generic class ``MyInvGen[T, ...]`` is called invariant in ``T`` if neither +* A generic class ``MyCovGen[T]`` is called covariant in type variable + ``T`` if ``MyCovGen[B]`` is always a subtype of ``MyCovGen[A]``. +* A generic class ``MyContraGen[T]`` is called contravariant in type + variable ``T`` if ``MyContraGen[A]`` is always a subtype of + ``MyContraGen[B]``. +* A generic class ``MyInvGen[T]`` is called invariant in ``T`` if neither of the above is true. Let us illustrate this by few simple examples: -* :py:data:`~typing.Union` is covariant in all variables: ``Union[Cat, int]`` is a subtype - of ``Union[Animal, int]``, - ``Union[Dog, int]`` is also a subtype of ``Union[Animal, int]``, etc. - Most immutable containers such as :py:class:`~typing.Sequence` and :py:class:`~typing.FrozenSet` are also - covariant. -* :py:data:`~typing.Callable` is an example of type that behaves contravariant in types of - arguments, namely ``Callable[[Employee], int]`` is a subtype of - ``Callable[[Manager], int]``. To understand this, consider a function: +.. code-block:: python + + # We'll use these classes in the examples below + class Shape: ... + class Triangle(Shape): ... + class Square(Shape): ... + +* Most immutable containers, such as :py:class:`~typing.Sequence` and + :py:class:`~typing.FrozenSet` are covariant. :py:data:`~typing.Union` is + also covariant in all variables: ``Union[Triangle, int]`` is + a subtype of ``Union[Shape, int]``. .. code-block:: python - def salaries(staff: list[Manager], - accountant: Callable[[Manager], int]) -> list[int]: ... + def count_lines(shapes: Sequence[Shape]) -> int: + return sum(shape.num_sides for shape in shapes) - This function needs a callable that can calculate a salary for managers, and - if we give it a callable that can calculate a salary for an arbitrary - employee, it's still safe. -* :py:class:`~typing.List` is an invariant generic type. Naively, one would think - that it is covariant, but let us consider this code: + triangles: Sequence[Triangle] + count_lines(triangles) # OK + + def foo(triangle: Triangle, num: int): + shape_or_number: Union[Shape, int] + # a Triangle is a Shape, and a Shape is a valid Union[Shape, int] + shape_or_number = triangle + + Covariance should feel relatively intuitive, but contravariance and invariance + can be harder to reason about. + +* :py:data:`~typing.Callable` is an example of type that behaves contravariant + in types of arguments. That is, ``Callable[[Shape], int]`` is a subtype of + ``Callable[[Triangle], int]``, despite ``Shape`` being a supertype of + ``Triangle``. To understand this, consider: .. code-block:: python - class Shape: - pass + def cost_of_paint_required( + triangle: Triangle, + area_calculator: Callable[[Triangle], float] + ) -> float: + return area_calculator(triangle) * DOLLAR_PER_SQ_FT + + # This straightforwardly works + def area_of_triangle(triangle: Triangle) -> float: ... + cost_of_paint_required(triangle, area_of_triangle) # OK + + # But this works as well! + def area_of_any_shape(shape: Shape) -> float: ... + cost_of_paint_required(triangle, area_of_any_shape) # OK + + ``cost_of_paint_required`` needs a callable that can calculate the area of a + triangle. If we give it a callable that can calculate the area of an + arbitrary shape (not just triangles), everything still works. + +* :py:class:`~typing.List` is an invariant generic type. Naively, one would think + that it is covariant, like :py:class:`~typing.Sequence` above, but consider this code: + + .. 
code-block:: python class Circle(Shape): - def rotate(self): - ... + # The rotate method is only defined on Circle, not on Shape + def rotate(self): ... def add_one(things: list[Shape]) -> None: things.append(Shape()) - my_things: list[Circle] = [] - add_one(my_things) # This may appear safe, but... - my_things[0].rotate() # ...this will fail + my_circles: list[Circle] = [] + add_one(my_circles) # This may appear safe, but... + my_circles[-1].rotate() # ...this will fail, since my_circles[0] is now a Shape, not a Circle Another example of invariant type is :py:class:`~typing.Dict`. Most mutable containers are invariant. @@ -417,6 +442,45 @@ type variables defined with special keyword arguments ``covariant`` or my_box = Box(Cat()) look_into(my_box) # OK, but mypy would complain here for an invariant type +.. _type-variable-upper-bound: + +Type variables with upper bounds +******************************** + +A type variable can also be restricted to having values that are +subtypes of a specific type. This type is called the upper bound of +the type variable, and is specified with the ``bound=...`` keyword +argument to :py:class:`~typing.TypeVar`. + +.. code-block:: python + + from typing import TypeVar, SupportsAbs + + T = TypeVar('T', bound=SupportsAbs[float]) + +In the definition of a generic function that uses such a type variable +``T``, the type represented by ``T`` is assumed to be a subtype of +its upper bound, so the function can use methods of the upper bound on +values of type ``T``. + +.. code-block:: python + + def largest_in_absolute_value(*xs: T) -> T: + return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float]. + +In a call to such a function, the type ``T`` must be replaced by a +type that is a subtype of its upper bound. Continuing the example +above: + +.. code-block:: python + + largest_in_absolute_value(-3.5, 2) # Okay, has type float. + largest_in_absolute_value(5+6j, 7) # Okay, has type complex. + largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. + +Type parameters of generic classes may also have upper bounds, which +restrict the valid values for the type parameter in the same way. + .. _type-variable-value-restriction: Type variables with value restriction @@ -451,7 +515,7 @@ argument types: concat(b'a', b'b') # Okay concat(1, 2) # Error! -Note that this is different from a union type, since combinations +Importantly, this is different from a union type, since combinations of ``str`` and ``bytes`` are not accepted: .. code-block:: python @@ -459,8 +523,8 @@ of ``str`` and ``bytes`` are not accepted: concat('string', b'bytes') # Error! In this case, this is exactly what we want, since it's not possible -to concatenate a string and a bytes object! The type checker -will reject this function: +to concatenate a string and a bytes object! If we tried to use +``Union``, the type checker would complain about this possibility: .. code-block:: python @@ -475,10 +539,13 @@ subtype of ``str``: class S(str): pass ss = concat(S('foo'), S('bar')) + reveal_type(ss) # Revealed type is "builtins.str" You may expect that the type of ``ss`` is ``S``, but the type is actually ``str``: a subtype gets promoted to one of the valid values -for the type variable, which in this case is ``str``. This is thus +for the type variable, which in this case is ``str``. + +This is thus subtly different from *bounded quantification* in languages such as Java, where the return type would be ``S``. 
The way mypy implements this is correct for ``concat``, since ``concat`` actually returns a @@ -494,66 +561,25 @@ values when defining a generic class. For example, mypy uses the type :py:class:`Pattern[AnyStr] ` for the return value of :py:func:`re.compile`, since regular expressions can be based on a string or a bytes pattern. -.. _type-variable-upper-bound: - -Type variables with upper bounds -******************************** - -A type variable can also be restricted to having values that are -subtypes of a specific type. This type is called the upper bound of -the type variable, and is specified with the ``bound=...`` keyword -argument to :py:class:`~typing.TypeVar`. - -.. code-block:: python - - from typing import TypeVar, SupportsAbs - - T = TypeVar('T', bound=SupportsAbs[float]) - -In the definition of a generic function that uses such a type variable -``T``, the type represented by ``T`` is assumed to be a subtype of -its upper bound, so the function can use methods of the upper bound on -values of type ``T``. - -.. code-block:: python - - def largest_in_absolute_value(*xs: T) -> T: - return max(xs, key=abs) # Okay, because T is a subtype of SupportsAbs[float]. - -In a call to such a function, the type ``T`` must be replaced by a -type that is a subtype of its upper bound. Continuing the example -above, - -.. code-block:: python - - largest_in_absolute_value(-3.5, 2) # Okay, has type float. - largest_in_absolute_value(5+6j, 7) # Okay, has type complex. - largest_in_absolute_value('a', 'b') # Error: 'str' is not a subtype of SupportsAbs[float]. - -Type parameters of generic classes may also have upper bounds, which -restrict the valid values for the type parameter in the same way. - A type variable may not have both a value restriction (see -:ref:`type-variable-value-restriction`) and an upper bound. +:ref:`type-variable-upper-bound`) and an upper bound. .. _declaring-decorators: Declaring decorators ******************** -One common application of type variables along with parameter specifications -is in declaring a decorator that preserves the signature of the function it decorates. - -Note that class decorators are handled differently than function decorators in -mypy: decorating a class does not erase its type, even if the decorator has -incomplete type annotations. +Decorators are typically functions that take a function as an argument and +return another function. Describing this behaviour in terms of types can +be a little tricky; we'll show how you can use ``TypeVar`` and a special +kind of type variable called a *parameter specification* to do so. -Suppose we have the following decorator, not type annotated yet, +Suppose we have the following decorator, not type annotated yet, that preserves the original function's signature and merely prints the decorated function's name: .. code-block:: python - def my_decorator(func): + def printing_decorator(func): def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) @@ -564,72 +590,88 @@ and we use it to decorate function ``add_forty_two``: .. code-block:: python # A decorated function. - @my_decorator + @printing_decorator def add_forty_two(value: int) -> int: return value + 42 a = add_forty_two(3) -Since ``my_decorator`` is not type-annotated, the following won't get type-checked: +Since ``printing_decorator`` is not type-annotated, the following won't get type checked: .. 
code-block:: python - reveal_type(a) # revealed type: Any - add_forty_two('foo') # no type-checker error :( + reveal_type(a) # Revealed type is "Any" + add_forty_two('foo') # No type checker error :( + +This is a sorry state of affairs! If you run with ``--strict``, mypy will +even alert you to this fact: +``Untyped decorator makes function "add_forty_two" untyped`` -Before parameter specifications, here's how one might have annotated the decorator: +Note that class decorators are handled differently than function decorators in +mypy: decorating a class does not erase its type, even if the decorator has +incomplete type annotations. + +Here's how one could annotate the decorator: .. code-block:: python - from typing import Callable, TypeVar + from typing import Any, Callable, TypeVar, cast F = TypeVar('F', bound=Callable[..., Any]) # A decorator that preserves the signature. - def my_decorator(func: F) -> F: + def printing_decorator(func: F) -> F: def wrapper(*args, **kwds): print("Calling", func) return func(*args, **kwds) return cast(F, wrapper) -and that would enable the following type checks: - -.. code-block:: python + @printing_decorator + def add_forty_two(value: int) -> int: + return value + 42 - reveal_type(a) # str - add_forty_two('x') # Type check error: incompatible type "str"; expected "int" + a = add_forty_two(3) + reveal_type(a) # Revealed type is "builtins.int" + add_forty_two('x') # Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" +This still has some shortcomings. First, we need to use the unsafe +:py:func:`~typing.cast` to convince mypy that ``wrapper()`` has the same +signature as ``func``. See :ref:`casts `. -Note that the ``wrapper()`` function is not type-checked. Wrapper -functions are typically small enough that this is not a big +Second, the ``wrapper()`` function is not tightly type checked, although +wrapper functions are typically small enough that this is not a big problem. This is also the reason for the :py:func:`~typing.cast` call in the -``return`` statement in ``my_decorator()``. See :ref:`casts `. However, -with the introduction of parameter specifications in mypy 0.940, we can now -have a more faithful type annotation: +``return`` statement in ``printing_decorator()``. + +However, we can use a parameter specification (:py:class:`~typing.ParamSpec`), +for a more faithful type annotation: .. code-block:: python - from typing import Callable, ParamSpec, TypeVar + from typing import Callable, TypeVar + from typing_extensions import ParamSpec P = ParamSpec('P') T = TypeVar('T') - def my_decorator(func: Callable[P, T]) -> Callable[P, T]: + def printing_decorator(func: Callable[P, T]) -> Callable[P, T]: def wrapper(*args: P.args, **kwds: P.kwargs) -> T: print("Calling", func) return func(*args, **kwds) return wrapper -When the decorator alters the signature, parameter specifications truly show their potential: +Parameter specifications also allow you to describe decorators that +alter the signature of the input function: .. 
code-block:: python - from typing import Callable, ParamSpec, TypeVar + from typing import Callable, TypeVar + from typing_extensions import ParamSpec P = ParamSpec('P') T = TypeVar('T') - # Note: We reuse 'P' in the return type, but replace 'T' with 'str' + # We reuse 'P' in the return type, but replace 'T' with 'str' def stringify(func: Callable[P, T]) -> Callable[P, str]: def wrapper(*args: P.args, **kwds: P.kwargs) -> str: return str(func(*args, **kwds)) @@ -640,9 +682,30 @@ When the decorator alters the signature, parameter specifications truly show the return value + 42 a = add_forty_two(3) - reveal_type(a) # str - foo('x') # Type check error: incompatible type "str"; expected "int" + reveal_type(a) # Revealed type is "builtins.str" + add_forty_two('x') # error: Argument 1 to "add_forty_two" has incompatible type "str"; expected "int" +Or insert an argument: + +.. code-block:: python + + from typing import Callable, TypeVar + from typing_extensions import Concatenate, ParamSpec + + P = ParamSpec('P') + T = TypeVar('T') + + def printing_decorator(func: Callable[P, T]) -> Callable[Concatenate[str, P], T]: + def wrapper(msg: str, /, *args: P.args, **kwds: P.kwargs) -> T: + print("Calling", func, "with", msg) + return func(*args, **kwds) + return wrapper + + @printing_decorator + def add_forty_two(value: int) -> int: + return value + 42 + + a = add_forty_two('three', 3) .. _decorator-factories: @@ -670,7 +733,7 @@ achieved by combining with :py:func:`@overload `: .. code-block:: python - from typing import Any, Callable, TypeVar, overload + from typing import Any, Callable, Optional, TypeVar, overload F = TypeVar('F', bound=Callable[..., Any]) @@ -682,7 +745,7 @@ achieved by combining with :py:func:`@overload `: def atomic(*, savepoint: bool = True) -> Callable[[F], F]: ... # Implementation - def atomic(__func: Callable[..., Any] = None, *, savepoint: bool = True): + def atomic(__func: Optional[Callable[..., Any]] = None, *, savepoint: bool = True): def decorator(func: Callable[..., Any]): ... # Code goes here if __func is not None: @@ -732,9 +795,8 @@ protocols mostly follow the normal rules for generic classes. Example: y: Box[int] = ... x = y # Error -- Box is invariant -Per :pep:`PEP 544: Generic protocols <544#generic-protocols>`, ``class -ClassName(Protocol[T])`` is allowed as a shorthand for ``class -ClassName(Protocol, Generic[T])``. +Note that ``class ClassName(Protocol[T])`` is allowed as a shorthand for +``class ClassName(Protocol, Generic[T])``, as per :pep:`PEP 544: Generic protocols <544#generic-protocols>`, The main difference between generic protocols and ordinary generic classes is that mypy checks that the declared variances of generic @@ -745,20 +807,18 @@ variable is invariant: .. code-block:: python - from typing import TypeVar - from typing_extensions import Protocol + from typing import Protocol, TypeVar T = TypeVar('T') - class ReadOnlyBox(Protocol[T]): # Error: covariant type variable expected + class ReadOnlyBox(Protocol[T]): # error: Invariant type variable "T" used in protocol where covariant one is expected def content(self) -> T: ... This example correctly uses a covariant type variable: .. code-block:: python - from typing import TypeVar - from typing_extensions import Protocol + from typing import Protocol, TypeVar T_co = TypeVar('T_co', covariant=True) @@ -783,16 +843,12 @@ Generic protocols can also be recursive. Example: class L: val: int + def next(self) -> 'L': ... - ... # details omitted - - def next(self) -> 'L': - ... 
# details omitted - - def last(seq: Linked[T]) -> T: - ... # implementation omitted + def last(seq: Linked[T]) -> T: ... - result = last(L()) # Inferred type of 'result' is 'int' + result = last(L()) + reveal_type(result) # Revealed type is "builtins.int" .. _generic-type-aliases: @@ -862,9 +918,60 @@ defeating the purpose of using aliases. Example: OIntVec = Optional[Vec[int]] -.. note:: +Using type variable bounds or values in generic aliases has the same effect +as in generic classes/functions. + + +Generic class internals +*********************** + +You may wonder what happens at runtime when you index a generic class. +Indexing returns a *generic alias* to the original class that returns instances +of the original class on instantiation: + +.. code-block:: python + + >>> from typing import TypeVar, Generic + >>> T = TypeVar('T') + >>> class Stack(Generic[T]): ... + >>> Stack + __main__.Stack + >>> Stack[int] + __main__.Stack[int] + >>> instance = Stack[int]() + >>> instance.__class__ + __main__.Stack - A type alias does not define a new type. For generic type aliases - this means that variance of type variables used for alias definition does not - apply to aliases. A parameterized generic alias is treated simply as an original - type with the corresponding type variables substituted. +Generic aliases can be instantiated or subclassed, similar to real +classes, but the above examples illustrate that type variables are +erased at runtime. Generic ``Stack`` instances are just ordinary +Python objects, and they have no extra runtime overhead or magic due +to being generic, other than a metaclass that overloads the indexing +operator. + +Note that in Python 3.8 and lower, the built-in types +:py:class:`list`, :py:class:`dict` and others do not support indexing. +This is why we have the aliases :py:class:`~typing.List`, +:py:class:`~typing.Dict` and so on in the :py:mod:`typing` +module. Indexing these aliases gives you a generic alias that +resembles generic aliases constructed by directly indexing the target +class in more recent versions of Python: + +.. code-block:: python + + >>> # Only relevant for Python 3.8 and below + >>> # For Python 3.9 onwards, prefer `list[int]` syntax + >>> from typing import List + >>> List[int] + typing.List[int] + +Note that the generic aliases in ``typing`` don't support constructing +instances: + +.. code-block:: python + + >>> from typing import List + >>> List[int]() + Traceback (most recent call last): + ... + TypeError: Type List cannot be instantiated; use list() instead diff --git a/docs/source/getting_started.rst b/docs/source/getting_started.rst index f55a54a0dd30..9b927097cfd2 100644 --- a/docs/source/getting_started.rst +++ b/docs/source/getting_started.rst @@ -6,15 +6,17 @@ Getting started This chapter introduces some core concepts of mypy, including function annotations, the :py:mod:`typing` module, stub files, and more. -Be sure to read this chapter carefully, as the rest of the documentation +If you're looking for a quick intro, see the +:ref:`mypy cheatsheet `. + +If you're unfamiliar with the concepts of static and dynamic type checking, +be sure to read this chapter carefully, as the rest of the documentation may not make much sense otherwise. Installing and running mypy *************************** -Mypy requires Python 3.6 or later to run. Once you've -`installed Python 3 `_, -install mypy using pip: +Mypy requires Python 3.7 or later to run. You can install mypy using pip: .. 
code-block:: shell @@ -31,16 +33,21 @@ out any errors it finds. Mypy will type check your code *statically*: this means that it will check for errors without ever running your code, just like a linter. -This means that you are always free to ignore the errors mypy reports and -treat them as just warnings, if you so wish: mypy runs independently from -Python itself. +This also means that you are always free to ignore the errors mypy reports, +if you so wish. You can always use the Python interpreter to run your code, +even if mypy reports errors. However, if you try directly running mypy on your existing Python code, it -will most likely report little to no errors: you must add *type annotations* -to your code to take full advantage of mypy. See the section below for details. +will most likely report little to no errors. This is a feature! It makes it +easy to adopt mypy incrementally. + +In order to get useful diagnostics from mypy, you must add *type annotations* +to your code. See the section below for details. + +.. _getting-started-dynamic-vs-static: -Function signatures and dynamic vs static typing -************************************************ +Dynamic vs static typing +************************ A function without type annotations is considered to be *dynamically typed* by mypy: @@ -52,22 +59,32 @@ A function without type annotations is considered to be *dynamically typed* by m By default, mypy will **not** type check dynamically typed functions. This means that with a few exceptions, mypy will not report any errors with regular unannotated Python. -This is the case even if you misuse the function: for example, mypy would currently -not report any errors if you tried running ``greeting(3)`` or ``greeting(b"Alice")`` -even though those function calls would result in errors at runtime. +This is the case even if you misuse the function! + +.. code-block:: python + + def greeting(name): + return 'Hello ' + name + + # These calls will fail when the program run, but mypy does not report an error + # because "greeting" does not have type annotations. + greeting(123) + greeting(b"Alice") -You can teach mypy to detect these kinds of bugs by adding *type annotations* (also -known as *type hints*). For example, you can teach mypy that ``greeting`` both accepts +We can get mypy to detect these kinds of bugs by adding *type annotations* (also +known as *type hints*). For example, you can tell mypy that ``greeting`` both accepts and returns a string like so: .. code-block:: python + # The "name: str" annotation says that the "name" argument should be a string + # The "-> str" annotation says that "greeting" will return a string def greeting(name: str) -> str: return 'Hello ' + name -This function is now *statically typed*: mypy can use the provided type hints to detect -incorrect usages of the ``greeting`` function. For example, it will reject the following -calls since the arguments have invalid types: +This function is now *statically typed*: mypy will use the provided type hints +to detect incorrect use of the ``greeting`` function and incorrect use of +variables within the ``greeting`` function. For example: .. code-block:: python @@ -76,9 +93,10 @@ calls since the arguments have invalid types: greeting(3) # Argument 1 to "greeting" has incompatible type "int"; expected "str" greeting(b'Alice') # Argument 1 to "greeting" has incompatible type "bytes"; expected "str" + greeting("World!") # No error -Note that this is all still valid Python 3 code! 
The function annotation syntax -shown above was added to Python :pep:`as a part of Python 3.0 <3107>`. + def bad_greeting(name: str) -> str: + return 'Hello ' * name # Unsupported operand types for * ("str" and "str") Being able to pick whether you want a function to be dynamically or statically typed can be very helpful. For example, if you are migrating an existing @@ -89,65 +107,35 @@ the code using dynamic typing and only add type hints later once the code is mor Once you are finished migrating or prototyping your code, you can make mypy warn you if you add a dynamic function by mistake by using the :option:`--disallow-untyped-defs ` -flag. See :ref:`command-line` for more information on configuring mypy. - -.. note:: +flag. You can also get mypy to provide some limited checking of dynamically typed +functions by using the :option:`--check-untyped-defs ` flag. +See :ref:`command-line` for more information on configuring mypy. - The earlier stages of analysis performed by mypy may report errors - even for dynamically typed functions. However, you should not rely - on this, as this may change in the future. +Strict mode and configuration +***************************** -More function signatures -************************ - -Here are a few more examples of adding type hints to function signatures. - -If a function does not explicitly return a value, give it a return -type of ``None``. Using a ``None`` result in a statically typed -context results in a type check error: - -.. code-block:: python - - def p() -> None: - print('hello') +Mypy has a *strict mode* that enables a number of additional checks, +like :option:`--disallow-untyped-defs `. - a = p() # Error: "p" does not return a value +If you run mypy with the :option:`--strict ` flag, you +will basically never get a type related error at runtime without a corresponding +mypy error, unless you explicitly circumvent mypy somehow. -Make sure to remember to include ``None``: if you don't, the function -will be dynamically typed. For example: +However, this flag will probably be too aggressive if you are trying +to add static types to a large, existing codebase. See :ref:`existing-code` +for suggestions on how to handle that case. -.. code-block:: python - - def f(): - 1 + 'x' # No static type error (dynamically typed) - - def g() -> None: - 1 + 'x' # Type check error (statically typed) - -Arguments with default values can be annotated like so: - -.. code-block:: python +Mypy is very configurable, so you can start with using ``--strict`` +and toggle off individual checks. For instance, if you use many third +party libraries that do not have types, +:option:`--ignore-missing-imports ` +may be useful. See :ref:`getting-to-strict` for how to build up to ``--strict``. - def greeting(name: str, excited: bool = False) -> str: - message = f'Hello, {name}' - if excited: - message += '!!!' - return message +See :ref:`command-line` and :ref:`config-file` for a complete reference on +configuration options. -``*args`` and ``**kwargs`` arguments can be annotated like so: - -.. 
code-block:: python - - def stars(*args: int, **kwargs: float) -> None: - # 'args' has type 'tuple[int, ...]' (a tuple of ints) - # 'kwargs' has type 'dict[str, float]' (a dict of strs to floats) - for arg in args: - print(arg) - for key, value in kwargs.items(): - print(key, value) - -Additional types, and the typing module -*************************************** +More complex types +****************** So far, we've added type hints that use only basic concrete types like ``str`` and ``float``. What if we want to express more complex types, @@ -173,28 +161,11 @@ accept one or more *type parameters*. In this case, we *parameterized* :py:class by writing ``list[str]``. This lets mypy know that ``greet_all`` accepts specifically lists containing strings, and not lists containing ints or any other type. -In Python 3.8 and earlier, you can instead import the -:py:class:`~typing.List` type from the :py:mod:`typing` module: - -.. code-block:: python - - from typing import List # Python 3.8 and earlier - - def greet_all(names: List[str]) -> None: - for name in names: - print('Hello ' + name) - - ... - -You can find many of these more complex static types in the :py:mod:`typing` module. - In the above examples, the type signature is perhaps a little too rigid. After all, there's no reason why this function must accept *specifically* a list -- it would run just fine if you were to pass in a tuple, a set, or any other custom iterable. -You can express this idea using the -:py:class:`collections.abc.Iterable` (or :py:class:`typing.Iterable` in Python -3.8 and earlier) type instead of :py:class:`list` : +You can express this idea using :py:class:`collections.abc.Iterable`: .. code-block:: python @@ -204,8 +175,19 @@ You can express this idea using the for name in names: print('Hello ' + name) +This behavior is actually a fundamental aspect of the PEP 484 type system: when +we annotate some variable with a type ``T``, we are actually telling mypy that +variable can be assigned an instance of ``T``, or an instance of a *subtype* of ``T``. +That is, ``list[str]`` is a subtype of ``Iterable[str]``. + +This also applies to inheritance, so if you have a class ``Child`` that inherits from +``Parent``, then a value of type ``Child`` can be assigned to a variable of type ``Parent``. +For example, a ``RuntimeError`` instance can be passed to a function that is annotated +as taking an ``Exception``. + As another example, suppose you want to write a function that can accept *either* -ints or strings, but no other types. You can express this using the :py:data:`~typing.Union` type: +ints or strings, but no other types. You can express this using the +:py:data:`~typing.Union` type. For example, ``int`` is a subtype of ``Union[int, str]``: .. code-block:: python @@ -217,26 +199,12 @@ ints or strings, but no other types. You can express this using the :py:data:`~t else: return user_id -Similarly, suppose that you want the function to accept only strings or ``None``. You can -again use :py:data:`~typing.Union` and use ``Union[str, None]`` -- or alternatively, use the type -``Optional[str]``. These two types are identical and interchangeable: ``Optional[str]`` -is just a shorthand or *alias* for ``Union[str, None]``. It exists mostly as a convenience -to help function signatures look a little cleaner: +The :py:mod:`typing` module contains many other useful types. -.. code-block:: python +For a quick overview, look through the :ref:`mypy cheatsheet `. 
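A minimal sketch to make the subtyping rule above concrete, reusing the ``greet_all`` function from earlier (the ``log_error`` helper is purely illustrative):

.. code-block:: python

    from collections.abc import Iterable

    def greet_all(names: Iterable[str]) -> None:
        for name in names:
            print('Hello ' + name)

    def log_error(exc: Exception) -> None:
        # Any subclass of Exception is accepted here.
        print(type(exc).__name__, exc)

    greet_all(['Alice', 'Bob'])      # OK: list[str] is a subtype of Iterable[str]
    greet_all(('Alice', 'Bob'))      # OK: a tuple of strings is also Iterable[str]
    log_error(RuntimeError('boom'))  # OK: RuntimeError is a subclass of Exception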
- from typing import Optional - - def greeting(name: Optional[str] = None) -> str: - # Optional[str] means the same thing as Union[str, None] - if name is None: - name = 'stranger' - return 'Hello, ' + name - -The :py:mod:`typing` module contains many other useful types. You can find a -quick overview by looking through the :ref:`mypy cheatsheet ` -and a more detailed overview (including information on how to make your own -generic types or your own type aliases) by looking through the +For a detailed overview (including information on how to make your own +generic types or your own type aliases), look through the :ref:`type system reference `. .. note:: @@ -264,10 +232,7 @@ mypy will try and *infer* as many details as possible. We saw an example of this in the ``normalize_id`` function above -- mypy understands basic :py:func:`isinstance ` checks and so can infer that the ``user_id`` variable was of -type ``int`` in the if-branch and of type ``str`` in the else-branch. Similarly, mypy -was able to understand that ``name`` could not possibly be ``None`` in the ``greeting`` -function above, based both on the ``name is None`` check and the variable assignment -in that if statement. +type ``int`` in the if-branch and of type ``str`` in the else-branch. As another example, consider the following function. Mypy can type check this function without a problem: it will use the available context and deduce that ``output`` must be @@ -282,114 +247,16 @@ of type ``list[float]`` and that ``num`` must be of type ``float``: output.append(num) return output -Mypy will warn you if it is unable to determine the type of some variable -- -for example, when assigning an empty dictionary to some global value: - -.. code-block:: python - - my_global_dict = {} # Error: Need type annotation for "my_global_dict" +For more details, see :ref:`type-inference-and-annotations`. -You can teach mypy what type ``my_global_dict`` is meant to have by giving it -a type hint. For example, if you knew this variable is supposed to be a dict -of ints to floats, you could annotate it using either variable annotations -(introduced in Python 3.6 by :pep:`526`) or using a comment-based -syntax like so: - -.. code-block:: python - - # If you're using Python 3.9+ - my_global_dict: dict[int, float] = {} - - # If you're using Python 3.6+ - my_global_dict: Dict[int, float] = {} - - -Types and classes -***************** - -So far, we've only seen examples of pre-existing types like the ``int`` -or ``float`` builtins, or generic types from ``collections.abc`` and -``typing``, such as ``Iterable``. However, these aren't the only types you can -use: in fact, you can use any Python class as a type! - -For example, suppose you've defined a custom class representing a bank account: - -.. code-block:: python - - class BankAccount: - # Note: It is ok to omit type hints for the "self" parameter. - # Mypy will infer the correct type. - - def __init__(self, account_name: str, initial_balance: int = 0) -> None: - # Note: Mypy will infer the correct types of your fields - # based on the types of the parameters. - self.account_name = account_name - self.balance = initial_balance - - def deposit(self, amount: int) -> None: - self.balance += amount - - def withdraw(self, amount: int) -> None: - self.balance -= amount - - def overdrawn(self) -> bool: - return self.balance < 0 - -You can declare that a function will accept any instance of your class -by simply annotating the parameters with ``BankAccount``: - -.. 
code-block:: python - - def transfer(src: BankAccount, dst: BankAccount, amount: int) -> None: - src.withdraw(amount) - dst.deposit(amount) - - account_1 = BankAccount('Alice', 400) - account_2 = BankAccount('Bob', 200) - transfer(account_1, account_2, 50) - -In fact, the ``transfer`` function we wrote above can accept more then just -instances of ``BankAccount``: it can also accept any instance of a *subclass* -of ``BankAccount``. For example, suppose you write a new class that looks like this: - -.. code-block:: python - - class AuditedBankAccount(BankAccount): - def __init__(self, account_name: str, initial_balance: int = 0) -> None: - super().__init__(account_name, initial_balance) - self.audit_log: list[str] = [] - - def deposit(self, amount: int) -> None: - self.audit_log.append(f"Deposited {amount}") - self.balance += amount - - def withdraw(self, amount: int) -> None: - self.audit_log.append(f"Withdrew {amount}") - self.balance -= amount - -Since ``AuditedBankAccount`` is a subclass of ``BankAccount``, we can directly pass -in instances of it into our ``transfer`` function: - -.. code-block:: python - - audited = AuditedBankAccount('Charlie', 300) - transfer(account_1, audited, 100) # Type checks! - -This behavior is actually a fundamental aspect of the PEP 484 type system: when -we annotate some variable with a type ``T``, we are actually telling mypy that -variable can be assigned an instance of ``T``, or an instance of a *subclass* of ``T``. -The same rule applies to type hints on parameters or fields. - -See :ref:`class-basics` to learn more about how to work with code involving classes. - - -.. _stubs-intro: +Types from libraries +******************** -Stubs files and typeshed -************************ +Mypy can also understand how to work with types from libraries that you use. -Mypy also understands how to work with classes found in the standard library. -For example, here is a function which uses the ``Path`` object from the +For instance, mypy comes out of the box with an intimate knowledge of the +Python standard library. For example, here is a function which uses the +``Path`` object from the `pathlib standard library module `_: .. code-block:: python @@ -397,74 +264,43 @@ For example, here is a function which uses the ``Path`` object from the from pathlib import Path def load_template(template_path: Path, name: str) -> str: - # Mypy understands that 'file_path.read_text()' returns a str... + # Mypy knows that `file_path` has a `read_text` method that returns a str template = template_path.read_text() - - # ...so understands this line type checks. + # ...so it understands this line type checks return template.replace('USERNAME', name) -This behavior may surprise you if you're familiar with how -Python internally works. The standard library does not use type hints -anywhere, so how did mypy know that ``Path.read_text()`` returns a ``str``, -or that ``str.replace(...)`` accepts exactly two ``str`` arguments? +If a third party library you use :ref:`declares support for type checking `, +mypy will type check your use of that library based on the type hints +it contains. -The answer is that mypy comes bundled with *stub files* from the -the `typeshed `_ project, which -contains stub files for the Python builtins, the standard library, -and selected third-party packages. - -A *stub file* is a file containing a skeleton of the public interface -of that Python module, including classes, variables, functions -- and -most importantly, their types. 
- -Mypy complains if it can't find a stub (or a real module) for a -library module that you import. Some modules ship with stubs or inline -annotations that mypy can automatically find, or you can install -additional stubs using pip (see :ref:`fix-missing-imports` and -:ref:`installed-packages` for the details). For example, you can install -the stubs for the ``requests`` package like this: - -.. code-block:: shell - - $ python3 -m pip install types-requests - -The stubs are usually packaged in a distribution named -``types-``. Note that the distribution name may be -different from the name of the package that you import. For example, -``types-PyYAML`` contains stubs for the ``yaml`` package. Mypy can -often suggest the name of the stub distribution: +However, if the third party library does not have type hints, mypy will +complain about missing type information. .. code-block:: text - prog.py:1: error: Library stubs not installed for "yaml" (or incompatible with Python 3.8) + prog.py:1: error: Library stubs not installed for "yaml" prog.py:1: note: Hint: "python3 -m pip install types-PyYAML" + prog.py:2: error: Library stubs not installed for "requests" + prog.py:2: note: Hint: "python3 -m pip install types-requests" ... -You can also :ref:`create -stubs ` easily. We discuss strategies for handling errors -about missing stubs in :ref:`ignore-missing-imports`. +In this case, you can provide mypy a different source of type information, +by installing a *stub* package. A stub package is a package that contains +type hints for another library, but no actual code. -Configuring mypy -**************** +.. code-block:: shell -Mypy supports many command line options that you can use to tweak how -mypy behaves: see :ref:`command-line` for more details. + $ python3 -m pip install types-PyYAML types-requests -For example, suppose you want to make sure *all* functions within your -codebase are using static typing and make mypy report an error if you -add a dynamically-typed function by mistake. You can make mypy do this -by running mypy with the :option:`--disallow-untyped-defs ` flag. +Stubs packages for a distribution are often named ``types-``. +Note that a distribution name may be different from the name of the package that +you import. For example, ``types-PyYAML`` contains stubs for the ``yaml`` +package. -Another potentially useful flag is :option:`--strict `, which enables many -(though not all) of the available strictness options -- including -:option:`--disallow-untyped-defs `. +For more discussion on strategies for handling errors about libraries without +type information, refer to :ref:`fix-missing-imports`. -This flag is mostly useful if you're starting a new project from scratch -and want to maintain a high degree of type safety from day one. However, -this flag will probably be too aggressive if you either plan on using -many untyped third party libraries or are trying to add static types to -a large, existing codebase. See :ref:`existing-code` for more suggestions -on how to handle the latter case. +For more information about stubs, see :ref:`stub-files`. Next steps ********** @@ -499,5 +335,8 @@ resources: `mypy issue tracker `_ and typing `Gitter chat `_. +* For general questions about Python typing, try posting at + `typing discussions `_. + You can also continue reading this document and skip sections that aren't relevant for you. You don't need to read sections in order. 
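To give a sense of what such stub packages contain, here is a minimal, hypothetical ``.pyi`` stub for an imaginary ``greetings`` package; real stubs, such as those in typeshed, are considerably more complete:

.. code-block:: python

    # greetings/__init__.pyi -- hypothetical stub: type hints only, no implementations
    class Greeter:
        language: str
        def __init__(self, language: str = ...) -> None: ...
        def greet(self, name: str) -> str: ...

    def make_greeting(name: str, excited: bool = ...) -> str: ...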
diff --git a/docs/source/index.rst b/docs/source/index.rst index 1cd16ff60af9..7ab3edebad39 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -6,35 +6,43 @@ Welcome to mypy documentation! ============================== -Mypy is a static type checker for Python 3. If you sprinkle -your code with type annotations, mypy can type check your code and find common -bugs. As mypy is a static analyzer, or a lint-like tool, the type -annotations are just hints for mypy and don't interfere when running your program. -You run your program with a standard Python interpreter, and the annotations -are treated effectively as comments. - -Using the Python 3 annotation syntax (using :pep:`484` and :pep:`526` notation), -you will be able to -efficiently annotate your code and use mypy to check the code for common errors. -Mypy has a powerful and easy-to-use type system with modern features such as -type inference, generics, callable types, tuple types, union types, and -structural subtyping. - -As a developer, you decide how to use mypy in your workflow. You can always -escape to dynamic typing as mypy's approach to static typing doesn't restrict -what you can do in your programs. Using mypy will make your programs easier to -understand, debug, and maintain. +Mypy is a static type checker for Python. + +Type checkers help ensure that you're using variables and functions in your code +correctly. With mypy, add type hints (:pep:`484`) +to your Python programs, and mypy will warn you when you use those types +incorrectly. + +Python is a dynamic language, so usually you'll only see errors in your code +when you attempt to run it. Mypy is a *static* checker, so it finds bugs +in your programs without even running them! + +Here is a small example to whet your appetite: + +.. code-block:: python + + number = input("What is your favourite number?") + print("It is", number + 1) # error: Unsupported operand types for + ("str" and "int") -This documentation provides a short introduction to mypy. It will help you -get started writing statically typed code. Knowledge of Python and a -statically typed object-oriented language, such as Java, are assumed. +Adding type hints for mypy does not interfere with the way your program would +otherwise run. Think of type hints as similar to comments! You can always use +the Python interpreter to run your code, even if mypy reports errors. + +Mypy is designed with gradual typing in mind. This means you can add type +hints to your code base slowly and that you can always fall back to dynamic +typing when static typing is not convenient. + +Mypy has a powerful and easy-to-use type system, supporting features such as +type inference, generics, callable types, tuple types, union types, +structural subtyping and more. Using mypy will make your programs easier to +understand, debug, and maintain. .. note:: - Mypy is used in production by many companies and projects, but mypy is - officially beta software. There will be occasional changes + Although mypy is production ready, there may be occasional changes that break backward compatibility. The mypy development team tries to - minimize the impact of changes to user code. + minimize the impact of changes to user code. In case of a major breaking + change, mypy's major version will be bumped. Contents -------- @@ -44,8 +52,8 @@ Contents :caption: First steps getting_started - existing_code cheat_sheet_py3 + existing_code .. 
_overview-type-system-reference: @@ -66,6 +74,7 @@ Contents generics more_types literal_types + typed_dict final_attrs metaclasses diff --git a/docs/source/installed_packages.rst b/docs/source/installed_packages.rst index d439fe4dc3a6..b9a3b891c99c 100644 --- a/docs/source/installed_packages.rst +++ b/docs/source/installed_packages.rst @@ -57,10 +57,10 @@ stubs.) If you have installed typed packages in another Python installation or environment, mypy won't automatically find them. One option is to install another copy of those packages in the environment in which you -use to run mypy. Alternatively, you can use the +installed mypy. Alternatively, you can use the :option:`--python-executable ` flag to point -to the target Python executable, and mypy will find packages installed -for that Python executable. +to the Python executable for another environment, and mypy will find +packages installed for that Python executable. Note that mypy does not support some more advanced import features, such as zip imports and custom import hooks. diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst index b9ddaf88ad74..b575a6eac4c5 100644 --- a/docs/source/kinds_of_types.rst +++ b/docs/source/kinds_of_types.rst @@ -388,12 +388,8 @@ case you should add an explicit ``Optional[...]`` annotation (or type comment). .. note:: ``Optional[...]`` *does not* mean a function argument with a default value. - However, if the default value of an argument is ``None``, you can use - an optional type for the argument, but it's not enforced by default. - You can use the :option:`--no-implicit-optional ` command-line option to stop - treating arguments with a ``None`` default value as having an implicit - ``Optional[...]`` type. It's possible that this will become the default - behavior in the future. + It simply means that ``None`` is a valid value for the argument. This is + a common confusion because ``None`` is a common default value for arguments. .. _alternative_union_syntax: diff --git a/docs/source/literal_types.rst b/docs/source/literal_types.rst index 7195ccc2b69b..a66d300bd0fd 100644 --- a/docs/source/literal_types.rst +++ b/docs/source/literal_types.rst @@ -495,13 +495,13 @@ the same way Python's runtime does: ... right = 'right' Traceback (most recent call last): ... - TypeError: Other: cannot extend enumeration 'Some' + TypeError: AllDirection: cannot extend enumeration 'Direction' Mypy also catches this error: .. 
code-block:: python - class AllDirection(Direction): # E: Cannot inherit from final class "Some" + class AllDirection(Direction): # E: Cannot inherit from final class "Direction" left = 'left' right = 'right' diff --git a/docs/source/metaclasses.rst b/docs/source/metaclasses.rst index a5d16aa722fd..396d7dbb42cc 100644 --- a/docs/source/metaclasses.rst +++ b/docs/source/metaclasses.rst @@ -72,12 +72,15 @@ so it's better not to combine metaclasses and class hierarchies: class A1(metaclass=M1): pass class A2(metaclass=M2): pass - class B1(A1, metaclass=M2): pass # Mypy Error: Inconsistent metaclass structure for "B1" + class B1(A1, metaclass=M2): pass # Mypy Error: metaclass conflict # At runtime the above definition raises an exception # TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases - # Same runtime error as in B1, but mypy does not catch it yet - class B12(A1, A2): pass + class B12(A1, A2): pass # Mypy Error: metaclass conflict + + # This can be solved via a common metaclass subtype: + class CorrectMeta(M1, M2): pass + class B2(A1, A2, metaclass=CorrectMeta): pass # OK, runtime is also OK * Mypy does not understand dynamically-computed metaclasses, such as ``class A(metaclass=f()): ...`` diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index 722909a038b5..ff5e8d384351 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -2,7 +2,7 @@ More types ========== This section introduces a few additional kinds of types, including :py:data:`~typing.NoReturn`, -:py:func:`NewType `, ``TypedDict``, and types for async code. It also discusses +:py:func:`NewType `, and types for async code. It also discusses how to give functions more precise types using overloads. All of these are only situationally useful, so feel free to skip this section and come back when you have a need for some of them. @@ -20,9 +20,6 @@ Here's a quick summary of what's covered here: signatures. This is useful if you need to encode a relationship between the arguments and the return type that would be difficult to express normally. -* ``TypedDict`` lets you give precise types for dictionaries that represent - objects with a fixed schema, such as ``{'id': 1, 'items': ['x']}``. - * Async types let you type check programs using ``async`` and ``await``. .. _noreturn: @@ -949,252 +946,3 @@ generator type as the return type: loop = asyncio.get_event_loop() loop.run_until_complete(countdown_2("USS Enterprise", 5)) loop.close() - - -.. _typeddict: - -TypedDict -********* - -Python programs often use dictionaries with string keys to represent objects. -Here is a typical example: - -.. code-block:: python - - movie = {'name': 'Blade Runner', 'year': 1982} - -Only a fixed set of string keys is expected (``'name'`` and -``'year'`` above), and each key has an independent value type (``str`` -for ``'name'`` and ``int`` for ``'year'`` above). We've previously -seen the ``dict[K, V]`` type, which lets you declare uniform -dictionary types, where every value has the same type, and arbitrary keys -are supported. This is clearly not a good fit for -``movie`` above. Instead, you can use a ``TypedDict`` to give a precise -type for objects like ``movie``, where the type of each -dictionary value depends on the key: - -.. 
code-block:: python - - from typing_extensions import TypedDict - - Movie = TypedDict('Movie', {'name': str, 'year': int}) - - movie: Movie = {'name': 'Blade Runner', 'year': 1982} - -``Movie`` is a ``TypedDict`` type with two items: ``'name'`` (with type ``str``) -and ``'year'`` (with type ``int``). Note that we used an explicit type -annotation for the ``movie`` variable. This type annotation is -important -- without it, mypy will try to infer a regular, uniform -:py:class:`dict` type for ``movie``, which is not what we want here. - -.. note:: - - If you pass a ``TypedDict`` object as an argument to a function, no - type annotation is usually necessary since mypy can infer the - desired type based on the declared argument type. Also, if an - assignment target has been previously defined, and it has a - ``TypedDict`` type, mypy will treat the assigned value as a ``TypedDict``, - not :py:class:`dict`. - -Now mypy will recognize these as valid: - -.. code-block:: python - - name = movie['name'] # Okay; type of name is str - year = movie['year'] # Okay; type of year is int - -Mypy will detect an invalid key as an error: - -.. code-block:: python - - director = movie['director'] # Error: 'director' is not a valid key - -Mypy will also reject a runtime-computed expression as a key, as -it can't verify that it's a valid key. You can only use string -literals as ``TypedDict`` keys. - -The ``TypedDict`` type object can also act as a constructor. It -returns a normal :py:class:`dict` object at runtime -- a ``TypedDict`` does -not define a new runtime type: - -.. code-block:: python - - toy_story = Movie(name='Toy Story', year=1995) - -This is equivalent to just constructing a dictionary directly using -``{ ... }`` or ``dict(key=value, ...)``. The constructor form is -sometimes convenient, since it can be used without a type annotation, -and it also makes the type of the object explicit. - -Like all types, ``TypedDict``\s can be used as components to build -arbitrarily complex types. For example, you can define nested -``TypedDict``\s and containers with ``TypedDict`` items. -Unlike most other types, mypy uses structural compatibility checking -(or structural subtyping) with ``TypedDict``\s. A ``TypedDict`` object with -extra items is compatible with (a subtype of) a narrower -``TypedDict``, assuming item types are compatible (*totality* also affects -subtyping, as discussed below). - -A ``TypedDict`` object is not a subtype of the regular ``dict[...]`` -type (and vice versa), since :py:class:`dict` allows arbitrary keys to be -added and removed, unlike ``TypedDict``. However, any ``TypedDict`` object is -a subtype of (that is, compatible with) ``Mapping[str, object]``, since -:py:class:`~typing.Mapping` only provides read-only access to the dictionary items: - -.. code-block:: python - - def print_typed_dict(obj: Mapping[str, object]) -> None: - for key, value in obj.items(): - print(f'{key}: {value}') - - print_typed_dict(Movie(name='Toy Story', year=1995)) # OK - -.. note:: - - Unless you are on Python 3.8 or newer (where ``TypedDict`` is available in - standard library :py:mod:`typing` module) you need to install ``typing_extensions`` - using pip to use ``TypedDict``: - - .. code-block:: text - - python3 -m pip install --upgrade typing-extensions - -Totality --------- - -By default mypy ensures that a ``TypedDict`` object has all the specified -keys. This will be flagged as an error: - -.. 
code-block:: python - - # Error: 'year' missing - toy_story: Movie = {'name': 'Toy Story'} - -Sometimes you want to allow keys to be left out when creating a -``TypedDict`` object. You can provide the ``total=False`` argument to -``TypedDict(...)`` to achieve this: - -.. code-block:: python - - GuiOptions = TypedDict( - 'GuiOptions', {'language': str, 'color': str}, total=False) - options: GuiOptions = {} # Okay - options['language'] = 'en' - -You may need to use :py:meth:`~dict.get` to access items of a partial (non-total) -``TypedDict``, since indexing using ``[]`` could fail at runtime. -However, mypy still lets use ``[]`` with a partial ``TypedDict`` -- you -just need to be careful with it, as it could result in a :py:exc:`KeyError`. -Requiring :py:meth:`~dict.get` everywhere would be too cumbersome. (Note that you -are free to use :py:meth:`~dict.get` with total ``TypedDict``\s as well.) - -Keys that aren't required are shown with a ``?`` in error messages: - -.. code-block:: python - - # Revealed type is "TypedDict('GuiOptions', {'language'?: builtins.str, - # 'color'?: builtins.str})" - reveal_type(options) - -Totality also affects structural compatibility. You can't use a partial -``TypedDict`` when a total one is expected. Also, a total ``TypedDict`` is not -valid when a partial one is expected. - -Supported operations --------------------- - -``TypedDict`` objects support a subset of dictionary operations and methods. -You must use string literals as keys when calling most of the methods, -as otherwise mypy won't be able to check that the key is valid. List -of supported operations: - -* Anything included in :py:class:`~typing.Mapping`: - - * ``d[key]`` - * ``key in d`` - * ``len(d)`` - * ``for key in d`` (iteration) - * :py:meth:`d.get(key[, default]) ` - * :py:meth:`d.keys() ` - * :py:meth:`d.values() ` - * :py:meth:`d.items() ` - -* :py:meth:`d.copy() ` -* :py:meth:`d.setdefault(key, default) ` -* :py:meth:`d1.update(d2) ` -* :py:meth:`d.pop(key[, default]) ` (partial ``TypedDict``\s only) -* ``del d[key]`` (partial ``TypedDict``\s only) - -.. note:: - - :py:meth:`~dict.clear` and :py:meth:`~dict.popitem` are not supported since they are unsafe - -- they could delete required ``TypedDict`` items that are not visible to - mypy because of structural subtyping. - -Class-based syntax ------------------- - -An alternative, class-based syntax to define a ``TypedDict`` is supported -in Python 3.6 and later: - -.. code-block:: python - - from typing_extensions import TypedDict - - class Movie(TypedDict): - name: str - year: int - -The above definition is equivalent to the original ``Movie`` -definition. It doesn't actually define a real class. This syntax also -supports a form of inheritance -- subclasses can define additional -items. However, this is primarily a notational shortcut. Since mypy -uses structural compatibility with ``TypedDict``\s, inheritance is not -required for compatibility. Here is an example of inheritance: - -.. code-block:: python - - class Movie(TypedDict): - name: str - year: int - - class BookBasedMovie(Movie): - based_on: str - -Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``. - -Mixing required and non-required items --------------------------------------- - -In addition to allowing reuse across ``TypedDict`` types, inheritance also allows -you to mix required and non-required (using ``total=False``) items -in a single ``TypedDict``. Example: - -.. 
code-block:: python - - class MovieBase(TypedDict): - name: str - year: int - - class Movie(MovieBase, total=False): - based_on: str - -Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` -can be left out when constructing an object. A ``TypedDict`` with a mix of required -and non-required keys, such as ``Movie`` above, will only be compatible with -another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the -first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys -in the first ``TypedDict``. - -Unions of TypedDicts --------------------- - -Since TypedDicts are really just regular dicts at runtime, it is not possible to -use ``isinstance`` checks to distinguish between different variants of a Union of -TypedDict in the same way you can with regular objects. - -Instead, you can use the :ref:`tagged union pattern `. The referenced -section of the docs has a full description with an example, but in short, you will -need to give each TypedDict the same key where each value has a unique -:ref:`Literal type `. Then, check that key to distinguish -between your TypedDicts. diff --git a/docs/source/protocols.rst b/docs/source/protocols.rst index 48530310c8cb..cb51809a66d5 100644 --- a/docs/source/protocols.rst +++ b/docs/source/protocols.rst @@ -4,14 +4,17 @@ Protocols and structural subtyping ================================== Mypy supports two ways of deciding whether two classes are compatible -as types: nominal subtyping and structural subtyping. *Nominal* -subtyping is strictly based on the class hierarchy. If class ``D`` +as types: nominal subtyping and structural subtyping. + +*Nominal* subtyping is strictly based on the class hierarchy. If class ``D`` inherits class ``C``, it's also a subtype of ``C``, and instances of ``D`` can be used when ``C`` instances are expected. This form of subtyping is used by default in mypy, since it's easy to understand and produces clear and concise error messages, and since it matches how the native :py:func:`isinstance ` check works -- based on class -hierarchy. *Structural* subtyping can also be useful. Class ``D`` is +hierarchy. + +*Structural* subtyping is based on the operations that can be performed with an object. Class ``D`` is a structural subtype of class ``C`` if the former has all attributes and methods of the latter, and with compatible types. @@ -55,11 +58,292 @@ For example, ``IntList`` below is iterable, over ``int`` values: print_numbered(x) # OK print_numbered([4, 5]) # Also OK -The subsections below introduce all built-in protocols defined in +:ref:`predefined_protocols_reference` lists all protocols defined in :py:mod:`typing` and the signatures of the corresponding methods you need to define to implement each protocol (the signatures can be left out, as always, but mypy won't type check unannotated methods). +Simple user-defined protocols +***************************** + +You can define your own protocol class by inheriting the special ``Protocol`` +class: + +.. code-block:: python + + from typing import Iterable + from typing_extensions import Protocol + + class SupportsClose(Protocol): + # Empty method body (explicit '...') + def close(self) -> None: ... + + class Resource: # No SupportsClose base class! + + def close(self) -> None: + self.resource.release() + + # ... other methods ... + + def close_all(items: Iterable[SupportsClose]) -> None: + for item in items: + item.close() + + close_all([Resource(), open('some/file')]) # Okay! 
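For contrast, a class without a compatible ``close()`` method is not accepted; a small sketch with a hypothetical ``Leaky`` class:

.. code-block:: python

    class Leaky:  # hypothetical class: no close() method
        def release(self) -> None:
            ...

    close_all([Leaky()])  # Rejected by mypy: "Leaky" has no "close" method,
                          # so it does not match the SupportsClose protocol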
+ +``Resource`` is a subtype of the ``SupportsClose`` protocol since it defines +a compatible ``close`` method. Regular file objects returned by :py:func:`open` are +similarly compatible with the protocol, as they support ``close()``. + +.. note:: + + The ``Protocol`` base class is provided in the ``typing_extensions`` + package for Python 3.4-3.7. Starting with Python 3.8, ``Protocol`` + is included in the ``typing`` module. + +Defining subprotocols and subclassing protocols +*********************************************** + +You can also define subprotocols. Existing protocols can be extended +and merged using multiple inheritance. Example: + +.. code-block:: python + + # ... continuing from the previous example + + class SupportsRead(Protocol): + def read(self, amount: int) -> bytes: ... + + class TaggedReadableResource(SupportsClose, SupportsRead, Protocol): + label: str + + class AdvancedResource(Resource): + def __init__(self, label: str) -> None: + self.label = label + + def read(self, amount: int) -> bytes: + # some implementation + ... + + resource: TaggedReadableResource + resource = AdvancedResource('handle with care') # OK + +Note that inheriting from an existing protocol does not automatically +turn the subclass into a protocol -- it just creates a regular +(non-protocol) class or ABC that implements the given protocol (or +protocols). The ``Protocol`` base class must always be explicitly +present if you are defining a protocol: + +.. code-block:: python + + class NotAProtocol(SupportsClose): # This is NOT a protocol + new_attr: int + + class Concrete: + new_attr: int = 0 + + def close(self) -> None: + ... + + # Error: nominal subtyping used by default + x: NotAProtocol = Concrete() # Error! + +You can also include default implementations of methods in +protocols. If you explicitly subclass these protocols you can inherit +these default implementations. + +Explicitly including a protocol as a +base class is also a way of documenting that your class implements a +particular protocol, and it forces mypy to verify that your class +implementation is actually compatible with the protocol. In particular, +omitting a value for an attribute or a method body will make it implicitly +abstract: + +.. code-block:: python + + class SomeProto(Protocol): + attr: int # Note, no right hand side + def method(self) -> str: ... # Literally just ... here + + class ExplicitSubclass(SomeProto): + pass + + ExplicitSubclass() # error: Cannot instantiate abstract class 'ExplicitSubclass' + # with abstract attributes 'attr' and 'method' + +Invariance of protocol attributes +********************************* + +A common issue with protocols is that protocol attributes are invariant. +For example: + +.. code-block:: python + + class Box(Protocol): + content: object + + class IntBox: + content: int + + def takes_box(box: Box) -> None: ... + + takes_box(IntBox()) # error: Argument 1 to "takes_box" has incompatible type "IntBox"; expected "Box" + # note: Following member(s) of "IntBox" have conflicts: + # note: content: expected "object", got "int" + +This is because ``Box`` defines ``content`` as a mutable attribute. +Here's why this is problematic: + +.. code-block:: python + + def takes_box_evil(box: Box) -> None: + box.content = "asdf" # This is bad, since box.content is supposed to be an object + + my_int_box = IntBox() + takes_box_evil(my_int_box) + my_int_box.content + 1 # Oops, TypeError! + +This can be fixed by declaring ``content`` to be read-only in the ``Box`` +protocol using ``@property``: + +.. 
code-block:: python + + class Box(Protocol): + @property + def content(self) -> object: ... + + class IntBox: + content: int + + def takes_box(box: Box) -> None: ... + + takes_box(IntBox(42)) # OK + +Recursive protocols +******************* + +Protocols can be recursive (self-referential) and mutually +recursive. This is useful for declaring abstract recursive collections +such as trees and linked lists: + +.. code-block:: python + + from typing import TypeVar, Optional + from typing_extensions import Protocol + + class TreeLike(Protocol): + value: int + + @property + def left(self) -> Optional['TreeLike']: ... + + @property + def right(self) -> Optional['TreeLike']: ... + + class SimpleTree: + def __init__(self, value: int) -> None: + self.value = value + self.left: Optional['SimpleTree'] = None + self.right: Optional['SimpleTree'] = None + + root: TreeLike = SimpleTree(0) # OK + +Using isinstance() with protocols +********************************* + +You can use a protocol class with :py:func:`isinstance` if you decorate it +with the ``@runtime_checkable`` class decorator. The decorator adds +rudimentary support for runtime structural checks: + +.. code-block:: python + + from typing_extensions import Protocol, runtime_checkable + + @runtime_checkable + class Portable(Protocol): + handles: int + + class Mug: + def __init__(self) -> None: + self.handles = 1 + + def use(handles: int) -> None: ... + + mug = Mug() + if isinstance(mug, Portable): # Works at runtime! + use(mug.handles) + +:py:func:`isinstance` also works with the :ref:`predefined protocols ` +in :py:mod:`typing` such as :py:class:`~typing.Iterable`. + +.. warning:: + :py:func:`isinstance` with protocols is not completely safe at runtime. + For example, signatures of methods are not checked. The runtime + implementation only checks that all protocol members exist, + not that they have the correct type. :py:func:`issubclass` with protocols + will only check for the existence of methods. + +.. note:: + :py:func:`isinstance` with protocols can also be surprisingly slow. + In many cases, you're better served by using :py:func:`hasattr` to + check for the presence of attributes. + +.. _callback_protocols: + +Callback protocols +****************** + +Protocols can be used to define flexible callback types that are hard +(or even impossible) to express using the :py:data:`Callable[...] ` syntax, such as variadic, +overloaded, and complex generic callbacks. They are defined with a special :py:meth:`__call__ ` +member: + +.. code-block:: python + + from typing import Optional, Iterable + from typing_extensions import Protocol + + class Combiner(Protocol): + def __call__(self, *vals: bytes, maxlen: Optional[int] = None) -> list[bytes]: ... + + def batch_proc(data: Iterable[bytes], cb_results: Combiner) -> bytes: + for item in data: + ... + + def good_cb(*vals: bytes, maxlen: Optional[int] = None) -> list[bytes]: + ... + def bad_cb(*vals: bytes, maxitems: Optional[int]) -> list[bytes]: + ... + + batch_proc([], good_cb) # OK + batch_proc([], bad_cb) # Error! Argument 2 has incompatible type because of + # different name and kind in the callback + +Callback protocols and :py:data:`~typing.Callable` types can be used interchangeably. +Argument names in :py:meth:`__call__ ` methods must be identical, unless +a double underscore prefix is used. For example: + +.. 
code-block:: python + + from typing import Callable, TypeVar + from typing_extensions import Protocol + + T = TypeVar('T') + + class Copy(Protocol): + def __call__(self, __origin: T) -> T: ... + + copy_a: Callable[[T], T] + copy_b: Copy + + copy_a = copy_b # OK + copy_b = copy_a # Also OK + +.. _predefined_protocols_reference: + +Predefined protocol reference +***************************** + Iteration protocols ................... @@ -283,207 +567,3 @@ AsyncContextManager[T] traceback: Optional[TracebackType]) -> Awaitable[Optional[bool]] See also :py:class:`~typing.AsyncContextManager`. - -Simple user-defined protocols -***************************** - -You can define your own protocol class by inheriting the special ``Protocol`` -class: - -.. code-block:: python - - from typing import Iterable - from typing_extensions import Protocol - - class SupportsClose(Protocol): - def close(self) -> None: - ... # Empty method body (explicit '...') - - class Resource: # No SupportsClose base class! - # ... some methods ... - - def close(self) -> None: - self.resource.release() - - def close_all(items: Iterable[SupportsClose]) -> None: - for item in items: - item.close() - - close_all([Resource(), open('some/file')]) # Okay! - -``Resource`` is a subtype of the ``SupportsClose`` protocol since it defines -a compatible ``close`` method. Regular file objects returned by :py:func:`open` are -similarly compatible with the protocol, as they support ``close()``. - -.. note:: - - The ``Protocol`` base class is provided in the ``typing_extensions`` - package for Python 3.4-3.7. Starting with Python 3.8, ``Protocol`` - is included in the ``typing`` module. - -Defining subprotocols and subclassing protocols -*********************************************** - -You can also define subprotocols. Existing protocols can be extended -and merged using multiple inheritance. Example: - -.. code-block:: python - - # ... continuing from the previous example - - class SupportsRead(Protocol): - def read(self, amount: int) -> bytes: ... - - class TaggedReadableResource(SupportsClose, SupportsRead, Protocol): - label: str - - class AdvancedResource(Resource): - def __init__(self, label: str) -> None: - self.label = label - - def read(self, amount: int) -> bytes: - # some implementation - ... - - resource: TaggedReadableResource - resource = AdvancedResource('handle with care') # OK - -Note that inheriting from an existing protocol does not automatically -turn the subclass into a protocol -- it just creates a regular -(non-protocol) class or ABC that implements the given protocol (or -protocols). The ``Protocol`` base class must always be explicitly -present if you are defining a protocol: - -.. code-block:: python - - class NotAProtocol(SupportsClose): # This is NOT a protocol - new_attr: int - - class Concrete: - new_attr: int = 0 - - def close(self) -> None: - ... - - # Error: nominal subtyping used by default - x: NotAProtocol = Concrete() # Error! - -You can also include default implementations of methods in -protocols. If you explicitly subclass these protocols you can inherit -these default implementations. Explicitly including a protocol as a -base class is also a way of documenting that your class implements a -particular protocol, and it forces mypy to verify that your class -implementation is actually compatible with the protocol. - -Recursive protocols -******************* - -Protocols can be recursive (self-referential) and mutually -recursive. 
This is useful for declaring abstract recursive collections -such as trees and linked lists: - -.. code-block:: python - - from typing import TypeVar, Optional - from typing_extensions import Protocol - - class TreeLike(Protocol): - value: int - - @property - def left(self) -> Optional['TreeLike']: ... - - @property - def right(self) -> Optional['TreeLike']: ... - - class SimpleTree: - def __init__(self, value: int) -> None: - self.value = value - self.left: Optional['SimpleTree'] = None - self.right: Optional['SimpleTree'] = None - - root: TreeLike = SimpleTree(0) # OK - -Using isinstance() with protocols -********************************* - -You can use a protocol class with :py:func:`isinstance` if you decorate it -with the ``@runtime_checkable`` class decorator. The decorator adds -support for basic runtime structural checks: - -.. code-block:: python - - from typing_extensions import Protocol, runtime_checkable - - @runtime_checkable - class Portable(Protocol): - handles: int - - class Mug: - def __init__(self) -> None: - self.handles = 1 - - def use(handles: int) -> None: ... - - mug = Mug() - if isinstance(mug, Portable): - use(mug.handles) # Works statically and at runtime - -:py:func:`isinstance` also works with the :ref:`predefined protocols ` -in :py:mod:`typing` such as :py:class:`~typing.Iterable`. - -.. note:: - :py:func:`isinstance` with protocols is not completely safe at runtime. - For example, signatures of methods are not checked. The runtime - implementation only checks that all protocol members are defined. - -.. _callback_protocols: - -Callback protocols -****************** - -Protocols can be used to define flexible callback types that are hard -(or even impossible) to express using the :py:data:`Callable[...] ` syntax, such as variadic, -overloaded, and complex generic callbacks. They are defined with a special :py:meth:`__call__ ` -member: - -.. code-block:: python - - from typing import Optional, Iterable - from typing_extensions import Protocol - - class Combiner(Protocol): - def __call__(self, *vals: bytes, maxlen: Optional[int] = None) -> list[bytes]: ... - - def batch_proc(data: Iterable[bytes], cb_results: Combiner) -> bytes: - for item in data: - ... - - def good_cb(*vals: bytes, maxlen: Optional[int] = None) -> list[bytes]: - ... - def bad_cb(*vals: bytes, maxitems: Optional[int]) -> list[bytes]: - ... - - batch_proc([], good_cb) # OK - batch_proc([], bad_cb) # Error! Argument 2 has incompatible type because of - # different name and kind in the callback - -Callback protocols and :py:data:`~typing.Callable` types can be used interchangeably. -Argument names in :py:meth:`__call__ ` methods must be identical, unless -a double underscore prefix is used. For example: - -.. code-block:: python - - from typing import Callable, TypeVar - from typing_extensions import Protocol - - T = TypeVar('T') - - class Copy(Protocol): - def __call__(self, __origin: T) -> T: ... - - copy_a: Callable[[T], T] - copy_b: Copy - - copy_a = copy_b # OK - copy_b = copy_a # Also OK diff --git a/docs/source/running_mypy.rst b/docs/source/running_mypy.rst index 8e5547ffd374..b0cefec9dafa 100644 --- a/docs/source/running_mypy.rst +++ b/docs/source/running_mypy.rst @@ -26,10 +26,6 @@ Specifying code to be checked Mypy lets you specify what files it should type check in several different ways. -Note that if you use namespace packages (in particular, packages without -``__init__.py``), you'll need to specify :option:`--namespace-packages `. - 1. 
First, you can pass in paths to Python files and directories you want to type check. For example:: @@ -83,6 +79,9 @@ Note that if you use namespace packages (in particular, packages without ...will type check the above string as a mini-program (and in this case, will report that ``list[int]`` is not callable). +You can also use the :confval:`files` option in your :file:`mypy.ini` file to specify which +files to check, in which case you can simply run ``mypy`` with no arguments. + Reading a list of files from a file *********************************** @@ -104,6 +103,82 @@ flags, the recommended approach is to use a :ref:`configuration file ` instead. +.. _mapping-paths-to-modules: + +Mapping file paths to modules +***************************** + +One of the main ways you can tell mypy what to type check +is by providing mypy a list of paths. For example:: + + $ mypy file_1.py foo/file_2.py file_3.pyi some/directory + +This section describes how exactly mypy maps the provided paths +to modules to type check. + +- Mypy will check all paths provided that correspond to files. + +- Mypy will recursively discover and check all files ending in ``.py`` or + ``.pyi`` in directory paths provided, after accounting for + :option:`--exclude `. + +- For each file to be checked, mypy will attempt to associate the file (e.g. + ``project/foo/bar/baz.py``) with a fully qualified module name (e.g. + ``foo.bar.baz``). The directory the package is in (``project``) is then + added to mypy's module search paths. + +How mypy determines fully qualified module names depends on if the options +:option:`--no-namespace-packages ` and +:option:`--explicit-package-bases ` are set. + +1. If :option:`--no-namespace-packages ` is set, + mypy will rely solely upon the presence of ``__init__.py[i]`` files to + determine the fully qualified module name. That is, mypy will crawl up the + directory tree for as long as it continues to find ``__init__.py`` (or + ``__init__.pyi``) files. + + For example, if your directory tree consists of ``pkg/subpkg/mod.py``, mypy + would require ``pkg/__init__.py`` and ``pkg/subpkg/__init__.py`` to exist in + order correctly associate ``mod.py`` with ``pkg.subpkg.mod`` + +2. The default case. If :option:`--namespace-packages ` is on, but :option:`--explicit-package-bases ` is off, mypy will allow for the possibility that + directories without ``__init__.py[i]`` are packages. Specifically, mypy will + look at all parent directories of the file and use the location of the + highest ``__init__.py[i]`` in the directory tree to determine the top-level + package. + + For example, say your directory tree consists solely of ``pkg/__init__.py`` + and ``pkg/a/b/c/d/mod.py``. When determining ``mod.py``'s fully qualified + module name, mypy will look at ``pkg/__init__.py`` and conclude that the + associated module name is ``pkg.a.b.c.d.mod``. + +3. You'll notice that the above case still relies on ``__init__.py``. If + you can't put an ``__init__.py`` in your top-level package, but still wish to + pass paths (as opposed to packages or modules using the ``-p`` or ``-m`` + flags), :option:`--explicit-package-bases ` + provides a solution. + + With :option:`--explicit-package-bases `, mypy + will locate the nearest parent directory that is a member of the ``MYPYPATH`` + environment variable, the :confval:`mypy_path` config or is the current + working directory. Mypy will then use the relative path to determine the + fully qualified module name. 
+ + For example, say your directory tree consists solely of + ``src/namespace_pkg/mod.py``. If you run the following command, mypy + will correctly associate ``mod.py`` with ``namespace_pkg.mod``:: + + $ MYPYPATH=src mypy --namespace-packages --explicit-package-bases . + +If you pass a file not ending in ``.py[i]``, the module name assumed is +``__main__`` (matching the behavior of the Python interpreter), unless +:option:`--scripts-are-modules ` is passed. + +Passing :option:`-v ` will show you the files and associated module +names that mypy will check. + How mypy handles imports ************************ @@ -138,7 +213,7 @@ the import. This can cause errors that look like the following: .. code-block:: text main.py:1: error: Skipping analyzing 'django': module is installed, but missing library stubs or py.typed marker - main.py:2: error: Library stubs not installed for "requests" (or incompatible with Python 3.8) + main.py:2: error: Library stubs not installed for "requests" main.py:3: error: Cannot find implementation or library stub for module named "this_module_does_not_exist" If you get any of these errors on an import, mypy will assume the type of that @@ -153,6 +228,11 @@ attribute of the module will automatically succeed: # But this type checks, and x will have type 'Any' x = does_not_exist.foobar() +This can result in mypy failing to warn you about errors in your code. Since +operations on ``Any`` result in ``Any``, these dynamic types can propagate +through your code, making type checking less effective. See +:ref:`dynamic-typing` for more information. + The next sections describe what each of these errors means and recommended next steps; scroll to the section that matches your error. @@ -170,12 +250,12 @@ unless they either have declared themselves to be themselves on `typeshed `_, the repository of types for the standard library and some 3rd party libraries. -If you are getting this error, try: +If you are getting this error, try to obtain type hints for the library you're using: 1. Upgrading the version of the library you're using, in case a newer version has started to include type hints. -2. Searching to see if there is a :ref:`PEP 561 compliant stub package `. +2. Searching to see if there is a :ref:`PEP 561 compliant stub package ` corresponding to your third party library. Stub packages let you install type hints independently from the library itself. @@ -189,7 +269,7 @@ If you are getting this error, try: adding the location to the ``MYPYPATH`` environment variable. These stub files do not need to be complete! A good strategy is to use - stubgen, a program that comes bundled with mypy, to generate a first + :ref:`stubgen `, a program that comes bundled with mypy, to generate a first rough draft of the stubs. You can then iterate on just the parts of the library you need. @@ -198,16 +278,19 @@ If you are getting this error, try: :ref:`PEP 561 compliant packages `. If you are unable to find any existing type hints nor have time to write your -own, you can instead *suppress* the errors. All this will do is make mypy stop -reporting an error on the line containing the import: the imported module -will continue to be of type ``Any``. +own, you can instead *suppress* the errors. + +All this will do is make mypy stop reporting an error on the line containing the +import: the imported module will continue to be of type ``Any``, and mypy may +not catch errors in its use. 1. 
To suppress a *single* missing import error, add a ``# type: ignore`` at the end of the line containing the import. 2. To suppress *all* missing import errors from a single library, add - a section to your :ref:`mypy config file ` for that library setting - :confval:`ignore_missing_imports` to True. For example, suppose your codebase + a per-module section to your :ref:`mypy config file ` setting + :confval:`ignore_missing_imports` to True for that library. For example, + suppose your codebase makes heavy use of an (untyped) library named ``foobar``. You can silence all import errors associated with that library and that library alone by adding the following section to your config file:: @@ -243,38 +326,39 @@ the library, you will get a message like this: .. code-block:: text - main.py:1: error: Library stubs not installed for "yaml" (or incompatible with Python 3.8) + main.py:1: error: Library stubs not installed for "yaml" main.py:1: note: Hint: "python3 -m pip install types-PyYAML" main.py:1: note: (or run "mypy --install-types" to install all missing stub packages) -You can resolve the issue by running the suggested pip command or -commands. Alternatively, you can use :option:`--install-types ` to install all known missing stubs: +You can resolve the issue by running the suggested pip commands. +If you're running mypy in CI, you can ensure the presence of any stub packages +you need the same as you would any other test dependency, e.g. by adding them to +the appropriate ``requirements.txt`` file. + +Alternatively, add the :option:`--install-types ` +to your mypy command to install all known missing stubs: .. code-block:: text mypy --install-types -This installs any stub packages that were suggested in the previous -mypy run. You can also use your normal mypy command line with the -extra :option:`--install-types ` option to -install missing stubs at the end of the run (if any were found). +This is slower than explicitly installing stubs, since it effectively +runs mypy twice -- the first time to find the missing stubs, and +the second time to type check your code properly after mypy has +installed the stubs. It also can make controlling stub versions harder, +resulting in less reproducible type checking. -Use :option:`--install-types ` with -:option:`--non-interactive ` to install all suggested -stub packages without asking for confirmation, *and* type check your -code, in a single command: +By default, :option:`--install-types ` shows a confirmation prompt. +Use :option:`--non-interactive ` to install all suggested +stub packages without asking for confirmation *and* type check your code: -.. code-block:: text +If you've already installed the relevant third-party libraries in an environment +other than the one mypy is running in, you can use :option:`--python-executable +` flag to point to the Python executable for that +environment, and mypy will find packages installed for that Python executable. - mypy --install-types --non-interactive src/ - -This can be useful in Continuous Integration jobs if you'd prefer not -to manage stub packages manually. This is somewhat slower than -explicitly installing stubs before running mypy, since it may type -check your code twice -- the first time to find the missing stubs, and -the second time to type check your code properly after mypy has -installed the stubs. +If you've installed the relevant stub packages and are still getting this error, +see the :ref:`section below `. .. 
_missing-type-hints-for-third-party-library: @@ -298,6 +382,11 @@ this error, try: line flag to point the Python interpreter containing your installed third party packages. + You can confirm that you are running mypy from the environment you expect + by running it like ``python -m mypy ...``. You can confirm that you are + installing into the environment you expect by running pip like + ``python -m pip ...``. + 2. Reading the :ref:`finding-imports` section below to make sure you understand how exactly mypy searches for and finds modules and modify how you're invoking mypy accordingly. @@ -314,18 +403,64 @@ this error, try: you must run ``mypy ~/foo-project/src`` (or set the ``MYPYPATH`` to ``~/foo-project/src``. -4. If you are using namespace packages -- packages which do not contain - ``__init__.py`` files within each subfolder -- using the - :option:`--namespace-packages ` command - line flag. +.. _finding-imports: + +How imports are found +********************* + +When mypy encounters an ``import`` statement or receives module +names from the command line via the :option:`--module ` or :option:`--package ` +flags, mypy tries to find the module on the file system similar +to the way Python finds it. However, there are some differences. + +First, mypy has its own search path. +This is computed from the following items: + +- The ``MYPYPATH`` environment variable + (a list of directories, colon-separated on UNIX systems, semicolon-separated on Windows). +- The :confval:`mypy_path` config file option. +- The directories containing the sources given on the command line + (see :ref:`Mapping file paths to modules `). +- The installed packages marked as safe for type checking (see + :ref:`PEP 561 support `) +- The relevant directories of the + `typeshed `_ repo. + +.. note:: -In some rare cases, you may get the "Cannot find implementation or library -stub for module" error even when the module is installed in your system. -This can happen when the module is both missing type hints and is installed -on your system in an unconventional way. + You cannot point to a stub-only package (:pep:`561`) via the ``MYPYPATH``, it must be + installed (see :ref:`PEP 561 support `) -In this case, follow the steps above on how to handle -:ref:`missing type hints in third party libraries `. +Second, mypy searches for stub files in addition to regular Python files +and packages. +The rules for searching for a module ``foo`` are as follows: + +- The search looks in each of the directories in the search path + (see above) until a match is found. +- If a package named ``foo`` is found (i.e. a directory + ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file) + that's a match. +- If a stub file named ``foo.pyi`` is found, that's a match. +- If a Python module named ``foo.py`` is found, that's a match. + +These matches are tried in order, so that if multiple matches are found +in the same directory on the search path +(e.g. a package and a Python file, or a stub file and a Python file) +the first one in the above list wins. + +In particular, if a Python file and a stub file are both present in the +same directory on the search path, only the stub file is used. +(However, if the files are in different directories, the one found +in the earlier directory is used.) + +Setting :confval:`mypy_path`/``MYPYPATH`` is mostly useful in the case +where you want to try running mypy against multiple distinct +sets of files that happen to share some common dependencies. 
+ +For example, if you have multiple projects that happen to be +using the same set of work-in-progress stubs, it could be +convenient to just have your ``MYPYPATH`` point to a single +directory containing the stubs. .. _follow-imports: @@ -388,152 +523,3 @@ hard-to-debug errors. Adjusting import following behaviour is often most useful when restricted to specific modules. This can be accomplished by setting a per-module :confval:`follow_imports` config option. - - -.. _mapping-paths-to-modules: - -Mapping file paths to modules -***************************** - -One of the main ways you can tell mypy what to type check -is by providing mypy a list of paths. For example:: - - $ mypy file_1.py foo/file_2.py file_3.pyi some/directory - -This section describes how exactly mypy maps the provided paths -to modules to type check. - -- Mypy will check all paths provided that correspond to files. - -- Mypy will recursively discover and check all files ending in ``.py`` or - ``.pyi`` in directory paths provided, after accounting for - :option:`--exclude `. - -- For each file to be checked, mypy will attempt to associate the file (e.g. - ``project/foo/bar/baz.py``) with a fully qualified module name (e.g. - ``foo.bar.baz``). The directory the package is in (``project``) is then - added to mypy's module search paths. - -How mypy determines fully qualified module names depends on if the options -:option:`--namespace-packages ` and -:option:`--explicit-package-bases ` are set. - -1. If :option:`--namespace-packages ` is off, - mypy will rely solely upon the presence of ``__init__.py[i]`` files to - determine the fully qualified module name. That is, mypy will crawl up the - directory tree for as long as it continues to find ``__init__.py`` (or - ``__init__.pyi``) files. - - For example, if your directory tree consists of ``pkg/subpkg/mod.py``, mypy - would require ``pkg/__init__.py`` and ``pkg/subpkg/__init__.py`` to exist in - order correctly associate ``mod.py`` with ``pkg.subpkg.mod`` - -2. If :option:`--namespace-packages ` is on, but - :option:`--explicit-package-bases ` is off, - mypy will allow for the possibility that directories without - ``__init__.py[i]`` are packages. Specifically, mypy will look at all parent - directories of the file and use the location of the highest - ``__init__.py[i]`` in the directory tree to determine the top-level package. - - For example, say your directory tree consists solely of ``pkg/__init__.py`` - and ``pkg/a/b/c/d/mod.py``. When determining ``mod.py``'s fully qualified - module name, mypy will look at ``pkg/__init__.py`` and conclude that the - associated module name is ``pkg.a.b.c.d.mod``. - -3. You'll notice that the above case still relies on ``__init__.py``. If - you can't put an ``__init__.py`` in your top-level package, but still wish to - pass paths (as opposed to packages or modules using the ``-p`` or ``-m`` - flags), :option:`--explicit-package-bases ` - provides a solution. - - With :option:`--explicit-package-bases `, mypy - will locate the nearest parent directory that is a member of the ``MYPYPATH`` - environment variable, the :confval:`mypy_path` config or is the current - working directory. Mypy will then use the relative path to determine the - fully qualified module name. - - For example, say your directory tree consists solely of - ``src/namespace_pkg/mod.py``. If you run the following command, mypy - will correctly associate ``mod.py`` with ``namespace_pkg.mod``:: - - $ MYPYPATH=src mypy --namespace-packages --explicit-package-bases . 
- -If you pass a file not ending in ``.py[i]``, the module name assumed is -``__main__`` (matching the behavior of the Python interpreter), unless -:option:`--scripts-are-modules ` is passed. - -Passing :option:`-v ` will show you the files and associated module -names that mypy will check. - - -.. _finding-imports: - -How imports are found -********************* - -When mypy encounters an ``import`` statement or receives module -names from the command line via the :option:`--module ` or :option:`--package ` -flags, mypy tries to find the module on the file system similar -to the way Python finds it. However, there are some differences. - -First, mypy has its own search path. -This is computed from the following items: - -- The ``MYPYPATH`` environment variable - (a colon-separated list of directories). -- The :confval:`mypy_path` config file option. -- The directories containing the sources given on the command line - (see :ref:`Mapping file paths to modules `). -- The installed packages marked as safe for type checking (see - :ref:`PEP 561 support `) -- The relevant directories of the - `typeshed `_ repo. - -.. note:: - - You cannot point to a stub-only package (:pep:`561`) via the ``MYPYPATH``, it must be - installed (see :ref:`PEP 561 support `) - -Second, mypy searches for stub files in addition to regular Python files -and packages. -The rules for searching for a module ``foo`` are as follows: - -- The search looks in each of the directories in the search path - (see above) until a match is found. -- If a package named ``foo`` is found (i.e. a directory - ``foo`` containing an ``__init__.py`` or ``__init__.pyi`` file) - that's a match. -- If a stub file named ``foo.pyi`` is found, that's a match. -- If a Python module named ``foo.py`` is found, that's a match. - -These matches are tried in order, so that if multiple matches are found -in the same directory on the search path -(e.g. a package and a Python file, or a stub file and a Python file) -the first one in the above list wins. - -In particular, if a Python file and a stub file are both present in the -same directory on the search path, only the stub file is used. -(However, if the files are in different directories, the one found -in the earlier directory is used.) - - -Other advice and best practices -******************************* - -There are multiple ways of telling mypy what files to type check, ranging -from passing in command line arguments to using the :confval:`files` or :confval:`mypy_path` -config file options to setting the -``MYPYPATH`` environment variable. - -However, in practice, it is usually sufficient to just use either -command line arguments or the :confval:`files` config file option (the two -are largely interchangeable). - -Setting :confval:`mypy_path`/``MYPYPATH`` is mostly useful in the case -where you want to try running mypy against multiple distinct -sets of files that happen to share some common dependencies. - -For example, if you have multiple projects that happen to be -using the same set of work-in-progress stubs, it could be -convenient to just have your ``MYPYPATH`` point to a single -directory containing the stubs. diff --git a/docs/source/runtime_troubles.rst b/docs/source/runtime_troubles.rst index 1bab66194e47..a62652111de6 100644 --- a/docs/source/runtime_troubles.rst +++ b/docs/source/runtime_troubles.rst @@ -8,8 +8,8 @@ version of Python considers legal code. This section describes these scenarios and explains how to get your code running again. 
Generally speaking, we have three tools at our disposal: -* For Python 3.7 through 3.9, use of ``from __future__ import annotations`` - (:pep:`563`), made the default in Python 3.11 and later +* Use of ``from __future__ import annotations`` (:pep:`563`) + (this behaviour may eventually be made the default in a future Python version) * Use of string literal types or type comments * Use of ``typing.TYPE_CHECKING`` @@ -18,11 +18,33 @@ problems you may encounter. .. _string-literal-types: -String literal types --------------------- +String literal types and type comments +-------------------------------------- + +Mypy allows you to add type annotations using ``# type:`` type comments. +For example: + +.. code-block:: python + + a = 1 # type: int + + def f(x): # type: (int) -> int + return x + 1 + + # Alternative type comment syntax for functions with many arguments + def send_email( + address, # type: Union[str, List[str]] + sender, # type: str + cc, # type: Optional[List[str]] + subject='', + body=None # type: List[str] + ): + # type: (...) -> bool Type comments can't cause runtime errors because comments are not evaluated by -Python. In a similar way, using string literal types sidesteps the problem of +Python. + +In a similar way, using string literal types sidesteps the problem of annotations that would cause runtime errors. Any type can be entered as a string literal, and you can combine @@ -30,8 +52,8 @@ string-literal types with non-string-literal types freely: .. code-block:: python - def f(a: list['A']) -> None: ... # OK - def g(n: 'int') -> None: ... # OK, though not useful + def f(a: list['A']) -> None: ... # OK, prevents NameError since A is defined later + def g(n: 'int') -> None: ... # Also OK, though not useful class A: pass @@ -47,9 +69,10 @@ Future annotations import (PEP 563) ----------------------------------- Many of the issues described here are caused by Python trying to evaluate -annotations. From Python 3.11 on, Python will no longer attempt to evaluate -function and variable annotations. This behaviour is made available in Python -3.7 and later through the use of ``from __future__ import annotations``. +annotations. Future Python versions (potentially Python 3.12) will by default no +longer attempt to evaluate function and variable annotations. This behaviour is +made available in Python 3.7 and later through the use of +``from __future__ import annotations``. This can be thought of as automatic string literal-ification of all function and variable annotations. Note that function and variable annotations are still @@ -74,7 +97,7 @@ required to be valid Python syntax. For more details, see :pep:`563`. class B: ... class C: ... -.. note:: +.. warning:: Some libraries may have use cases for dynamic evaluation of annotations, for instance, through use of ``typing.get_type_hints`` or ``eval``. If your @@ -273,8 +296,8 @@ the built-in collections or those from :py:mod:`collections.abc`: y: dict[int, str] z: Sequence[str] = x -There is limited support for using this syntax in Python 3.7 and later as well. -If you use ``from __future__ import annotations``, mypy will understand this +There is limited support for using this syntax in Python 3.7 and later as well: +if you use ``from __future__ import annotations``, mypy will understand this syntax in annotations. However, since this will not be supported by the Python interpreter at runtime, make sure you're aware of the caveats mentioned in the notes at :ref:`future annotations import`. 
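A minimal sketch of how this plays out on Python 3.7 or 3.8 (the ``first`` function below is a made-up example for illustration, not taken from the docs): with the future import, mypy accepts ``list[int]`` in annotations, and the interpreter never evaluates them, so the code runs; the same syntax in an ordinary runtime expression still fails on those versions.

.. code-block:: python

    from __future__ import annotations

    def first(items: list[int]) -> int:
        # OK: the annotation is only a string at runtime and is never evaluated
        return items[0]

    # By contrast, using the syntax outside an annotation still fails at
    # runtime on Python 3.7/3.8:
    # Alias = list[int]  # TypeError: 'type' object is not subscriptable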
@@ -285,8 +308,8 @@ Using X | Y syntax for Unions Starting with Python 3.10 (:pep:`604`), you can spell union types as ``x: int | str``, instead of ``x: typing.Union[int, str]``. -There is limited support for using this syntax in Python 3.7 and later as well. -If you use ``from __future__ import annotations``, mypy will understand this +There is limited support for using this syntax in Python 3.7 and later as well: +if you use ``from __future__ import annotations``, mypy will understand this syntax in annotations, string literal types, type comments and stub files. However, since this will not be supported by the Python interpreter at runtime (if evaluated, ``int | str`` will raise ``TypeError: unsupported operand type(s) diff --git a/docs/source/stubs.rst b/docs/source/stubs.rst index af47a0e2afdd..7c84a9718b3e 100644 --- a/docs/source/stubs.rst +++ b/docs/source/stubs.rst @@ -3,12 +3,15 @@ Stub files ========== +A *stub file* is a file containing a skeleton of the public interface +of that Python module, including classes, variables, functions -- and +most importantly, their types. + Mypy uses stub files stored in the `typeshed `_ repository to determine the types of standard library and third-party library functions, classes, and other definitions. You can also create your own stubs that will be -used to type check your code. The basic properties of stubs were introduced -back in :ref:`stubs-intro`. +used to type check your code. Creating a stub *************** diff --git a/docs/source/stubtest.rst b/docs/source/stubtest.rst index ca291f55947e..f3c036f56c06 100644 --- a/docs/source/stubtest.rst +++ b/docs/source/stubtest.rst @@ -41,6 +41,10 @@ stubs and implementation or to check for stub completeness. It's used to test Python's official collection of library stubs, `typeshed `_. +.. warning:: + + stubtest will import and execute Python code from the packages it checks. + Example ******* @@ -65,7 +69,7 @@ Here's a quick example of what stubtest can do: error: library.foo is inconsistent, runtime argument "x" has a default value but stub argument does not Stub: at line 3 def (x: builtins.int) - Runtime: at line 3 in file ~/library.py + Runtime: in file ~/library.py:3 def (x=None) error: library.x variable differs from runtime type Literal['hello, stubtest'] diff --git a/docs/source/type_inference_and_annotations.rst b/docs/source/type_inference_and_annotations.rst index 47a29a6abf95..6adb4e651224 100644 --- a/docs/source/type_inference_and_annotations.rst +++ b/docs/source/type_inference_and_annotations.rst @@ -1,22 +1,35 @@ +.. _type-inference-and-annotations: + Type inference and type annotations =================================== Type inference ************** -Mypy considers the initial assignment as the definition of a variable. -If you do not explicitly -specify the type of the variable, mypy infers the type based on the -static type of the value expression: +For most variables, if you do not explicitly specify its type, mypy will +infer the correct type based on what is initially assigned to the variable. .. code-block:: python - i = 1 # Infer type "int" for i - l = [1, 2] # Infer type "list[int]" for l + # Mypy will infer the type of these variables, despite no annotations + i = 1 + reveal_type(i) # Revealed type is "builtins.int" + l = [1, 2] + reveal_type(l) # Revealed type is "builtins.list[builtins.int]" + + +.. 
note:: + + Note that mypy will not use type inference in dynamically typed functions + (those without a function type annotation) — every local variable type + defaults to ``Any`` in such functions. For more details, see :ref:`dynamic-typing`. -Type inference is not used in dynamically typed functions (those -without a function type annotation) — every local variable type defaults -to ``Any`` in such functions. ``Any`` is discussed later in more detail. + .. code-block:: python + + def untyped_function(): + i = 1 + reveal_type(i) # Revealed type is "Any" + # 'reveal_type' always outputs 'Any' in unchecked functions .. _explicit-var-types: @@ -35,20 +48,33 @@ variable type annotation: Without the type annotation, the type of ``x`` would be just ``int``. We use an annotation to give it a more general type ``Union[int, str]`` (this type means that the value can be either an ``int`` or a ``str``). -Mypy checks that the type of the initializer is compatible with the -declared type. The following example is not valid, since the initializer is -a floating point number, and this is incompatible with the declared -type: + +The best way to think about this is that the type annotation sets the type of +the variable, not the type of the expression. For instance, mypy will complain +about the following code: .. code-block:: python - x: Union[int, str] = 1.1 # Error! + x: Union[int, str] = 1.1 # error: Incompatible types in assignment + # (expression has type "float", variable has type "Union[int, str]") .. note:: - The best way to think about this is that the type annotation sets the - type of the variable, not the type of the expression. To force the - type of an expression you can use :py:func:`cast(\, \) `. + To explicitly override the type of an expression you can use + :py:func:`cast(\, \) `. + See :ref:`casts` for details. + +Note that you can explicitly declare the type of a variable without +giving it an initial value: + +.. code-block:: python + + # We only unpack two values, so there's no right-hand side value + # for mypy to infer the type of "cs" from: + a, b, *cs = 1, 2 # error: Need type annotation for "cs" + + rs: list[int] # no assignment! + p, q, *rs = 1, 2 # OK Explicit types for collections ****************************** @@ -67,15 +93,9 @@ In these cases you can give the type explicitly using a type annotation: .. code-block:: python - l: list[int] = [] # Create empty list with type list[int] + l: list[int] = [] # Create empty list of int d: dict[str, int] = {} # Create empty dictionary (str -> int) -Similarly, you can also give an explicit type when creating an empty set: - -.. code-block:: python - - s: set[int] = set() - .. note:: Using type arguments (e.g. ``list[int]``) on builtin collections like @@ -88,13 +108,14 @@ Similarly, you can also give an explicit type when creating an empty set: Compatibility of container types ******************************** -The following program generates a mypy error, since ``list[int]`` -is not compatible with ``list[object]``: +A quick note: container types can sometimes be unintuitive. We'll discuss this +more in :ref:`variance`. For example, the following program generates a mypy error, +because mypy treats ``list[int]`` as incompatible with ``list[object]``: .. 
code-block:: python def f(l: list[object], k: list[int]) -> None: - l = k # Type check error: incompatible types in assignment + l = k # error: Incompatible types in assignment The reason why the above assignment is disallowed is that allowing the assignment could result in non-int values stored in a list of ``int``: @@ -106,33 +127,32 @@ assignment could result in non-int values stored in a list of ``int``: l.append('x') print(k[-1]) # Ouch; a string in list[int] -Other container types like :py:class:`dict` and :py:class:`set` behave similarly. We -will discuss how you can work around this in :ref:`variance`. +Other container types like :py:class:`dict` and :py:class:`set` behave similarly. -You can still run the above program; it prints ``x``. This illustrates -the fact that static types are used during type checking, but they do -not affect the runtime behavior of programs. You can run programs with -type check failures, which is often very handy when performing a large -refactoring. Thus you can always 'work around' the type system, and it +You can still run the above program; it prints ``x``. This illustrates the fact +that static types do not affect the runtime behavior of programs. You can run +programs with type check failures, which is often very handy when performing a +large refactoring. Thus you can always 'work around' the type system, and it doesn't really limit what you can do in your program. Context in type inference ************************* -Type inference is *bidirectional* and takes context into account. For -example, the following is valid: +Type inference is *bidirectional* and takes context into account. + +Mypy will take into account the type of the variable on the left-hand side +of an assignment when inferring the type of the expression on the right-hand +side. For example, the following will type check: .. code-block:: python def f(l: list[object]) -> None: l = [1, 2] # Infer type list[object] for [1, 2], not list[int] -In an assignment, the type context is determined by the assignment -target. In this case this is ``l``, which has the type -``list[object]``. The value expression ``[1, 2]`` is type checked in -this context and given the type ``list[object]``. In the previous -example we introduced a new variable ``l``, and here the type context -was empty. + +The value expression ``[1, 2]`` is type checked with the additional +context that it is being assigned to a variable of type ``list[object]``. +This is used to infer the type of the *expression* as ``list[object]``. Declared argument types are also used for type context. In this program mypy knows that the empty list ``[]`` should have type ``list[int]`` based @@ -165,28 +185,7 @@ Working around the issue is easy by adding a type annotation: a: list[int] = [] # OK foo(a) -Starred expressions -******************* - -In most cases, mypy can infer the type of starred expressions from the -right-hand side of an assignment, but not always: - -.. code-block:: python - - a, *bs = 1, 2, 3 # OK - p, q, *rs = 1, 2 # Error: Type of rs cannot be inferred - -On first line, the type of ``bs`` is inferred to be -``list[int]``. However, on the second line, mypy cannot infer the type -of ``rs``, because there is no right-hand side value for ``rs`` to -infer the type from. In cases like these, the starred expression needs -to be annotated with a starred type: - -.. code-block:: python - - p, q, *rs = 1, 2 # type: int, int, list[int] - -Here, the type of ``rs`` is set to ``list[int]``. +.. 
_silencing-type-errors: Silencing type errors ********************* @@ -194,22 +193,24 @@ Silencing type errors You might want to disable type checking on specific lines, or within specific files in your codebase. To do that, you can use a ``# type: ignore`` comment. -For example, say that the web framework that you use now takes an integer -argument to ``run()``, which starts it on localhost on that port. Like so: +For example, say in its latest update, the web framework you use can now take an +integer argument to ``run()``, which starts it on localhost on that port. +Like so: .. code-block:: python # Starting app on http://localhost:8000 app.run(8000) -However, the type stubs that the package uses is not up-to-date, and it still -expects only ``str`` types for ``run()``. This would give you the following error: +However, the devs forgot to update their type annotations for +``run``, so mypy still thinks ``run`` only expects ``str`` types. +This would give you the following error: .. code-block:: text error: Argument 1 to "run" of "A" has incompatible type "int"; expected "str" -If you cannot directly fix the type stubs yourself, you can temporarily +If you cannot directly fix the web framework yourself, you can temporarily disable type checking on that line, by adding a ``# type: ignore``: .. code-block:: python @@ -227,11 +228,12 @@ short explanation of the bug. To do that, use this format: .. code-block:: python # Starting app on http://localhost:8000 - app.run(8000) # type: ignore # `run()` now accepts an `int`, as a port + app.run(8000) # type: ignore # `run()` in v2.0 accepts an `int`, as a port +Type ignore error codes +----------------------- -Mypy displays an error code for each error if you use -:option:`--show-error-codes `: +By default, mypy displays an error code for each error: .. code-block:: text @@ -242,18 +244,52 @@ It is possible to add a specific error-code in your ignore comment (e.g. ``# type: ignore[attr-defined]``) to clarify what's being silenced. You can find more information about error codes :ref:`here `. -Similarly, you can also ignore all mypy checks in a file, by adding a -``# type: ignore`` at the top of the file: +Other ways to silence errors +---------------------------- + +You can get mypy to silence errors about a specific variable by dynamically +typing it with ``Any``. See :ref:`dynamic-typing` for more information. .. code-block:: python - # type: ignore + from typing import Any + + def f(x: Any, y: str) -> None: + x = 'hello' + x += 1 # OK + +You can ignore all mypy errors in a file by adding a +``# mypy: ignore-errors`` at the top of the file: + +.. code-block:: python + + # mypy: ignore-errors # This is a test file, skipping type checking in it. import unittest ... +You can also specify per-module configuration options in your :ref:`config-file`. +For example: + +.. code-block:: ini + + # Don't report errors in the 'package_to_fix_later' package + [mypy-package_to_fix_later.*] + ignore_errors = True + + # Disable specific error codes in the 'tests' package + # Also don't require type annotations + [mypy-tests.*] + disable_error_code = var-annotated, has-type + allow_untyped_defs = True + + # Silence import errors from the 'library_missing_types' package + [mypy-library_missing_types.*] + ignore_missing_imports = True + Finally, adding a ``@typing.no_type_check`` decorator to a class, method or -function has the effect of ignoring that class, method or function. 
+function causes mypy to avoid type checking that class, method or function +and to treat it as not having any type annotations. .. code-block:: python diff --git a/docs/source/type_narrowing.rst b/docs/source/type_narrowing.rst index 806835ed33a5..72a816679140 100644 --- a/docs/source/type_narrowing.rst +++ b/docs/source/type_narrowing.rst @@ -16,7 +16,7 @@ The simplest way to narrow a type is to use one of the supported expressions: - :py:func:`isinstance` like in ``isinstance(obj, float)`` will narrow ``obj`` to have ``float`` type - :py:func:`issubclass` like in ``issubclass(cls, MyClass)`` will narrow ``cls`` to be ``Type[MyClass]`` -- :py:func:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type +- :py:class:`type` like in ``type(obj) is int`` will narrow ``obj`` to have ``int`` type - :py:func:`callable` like in ``callable(obj)`` will narrow object to callable type Type narrowing is contextual. For example, based on the condition, mypy will narrow an expression only within an ``if`` branch: diff --git a/docs/source/typed_dict.rst b/docs/source/typed_dict.rst new file mode 100644 index 000000000000..19a717d7feb7 --- /dev/null +++ b/docs/source/typed_dict.rst @@ -0,0 +1,250 @@ +.. _typeddict: + +TypedDict +********* + +Python programs often use dictionaries with string keys to represent objects. +``TypedDict`` lets you give precise types for dictionaries that represent +objects with a fixed schema, such as ``{'id': 1, 'items': ['x']}``. + +Here is a typical example: + +.. code-block:: python + + movie = {'name': 'Blade Runner', 'year': 1982} + +Only a fixed set of string keys is expected (``'name'`` and +``'year'`` above), and each key has an independent value type (``str`` +for ``'name'`` and ``int`` for ``'year'`` above). We've previously +seen the ``dict[K, V]`` type, which lets you declare uniform +dictionary types, where every value has the same type, and arbitrary keys +are supported. This is clearly not a good fit for +``movie`` above. Instead, you can use a ``TypedDict`` to give a precise +type for objects like ``movie``, where the type of each +dictionary value depends on the key: + +.. code-block:: python + + from typing_extensions import TypedDict + + Movie = TypedDict('Movie', {'name': str, 'year': int}) + + movie: Movie = {'name': 'Blade Runner', 'year': 1982} + +``Movie`` is a ``TypedDict`` type with two items: ``'name'`` (with type ``str``) +and ``'year'`` (with type ``int``). Note that we used an explicit type +annotation for the ``movie`` variable. This type annotation is +important -- without it, mypy will try to infer a regular, uniform +:py:class:`dict` type for ``movie``, which is not what we want here. + +.. note:: + + If you pass a ``TypedDict`` object as an argument to a function, no + type annotation is usually necessary since mypy can infer the + desired type based on the declared argument type. Also, if an + assignment target has been previously defined, and it has a + ``TypedDict`` type, mypy will treat the assigned value as a ``TypedDict``, + not :py:class:`dict`. + +Now mypy will recognize these as valid: + +.. code-block:: python + + name = movie['name'] # Okay; type of name is str + year = movie['year'] # Okay; type of year is int + +Mypy will detect an invalid key as an error: + +.. code-block:: python + + director = movie['director'] # Error: 'director' is not a valid key + +Mypy will also reject a runtime-computed expression as a key, as +it can't verify that it's a valid key. 
You can only use string +literals as ``TypedDict`` keys. + +The ``TypedDict`` type object can also act as a constructor. It +returns a normal :py:class:`dict` object at runtime -- a ``TypedDict`` does +not define a new runtime type: + +.. code-block:: python + + toy_story = Movie(name='Toy Story', year=1995) + +This is equivalent to just constructing a dictionary directly using +``{ ... }`` or ``dict(key=value, ...)``. The constructor form is +sometimes convenient, since it can be used without a type annotation, +and it also makes the type of the object explicit. + +Like all types, ``TypedDict``\s can be used as components to build +arbitrarily complex types. For example, you can define nested +``TypedDict``\s and containers with ``TypedDict`` items. +Unlike most other types, mypy uses structural compatibility checking +(or structural subtyping) with ``TypedDict``\s. A ``TypedDict`` object with +extra items is compatible with (a subtype of) a narrower +``TypedDict``, assuming item types are compatible (*totality* also affects +subtyping, as discussed below). + +A ``TypedDict`` object is not a subtype of the regular ``dict[...]`` +type (and vice versa), since :py:class:`dict` allows arbitrary keys to be +added and removed, unlike ``TypedDict``. However, any ``TypedDict`` object is +a subtype of (that is, compatible with) ``Mapping[str, object]``, since +:py:class:`~typing.Mapping` only provides read-only access to the dictionary items: + +.. code-block:: python + + def print_typed_dict(obj: Mapping[str, object]) -> None: + for key, value in obj.items(): + print(f'{key}: {value}') + + print_typed_dict(Movie(name='Toy Story', year=1995)) # OK + +.. note:: + + Unless you are on Python 3.8 or newer (where ``TypedDict`` is available in + standard library :py:mod:`typing` module) you need to install ``typing_extensions`` + using pip to use ``TypedDict``: + + .. code-block:: text + + python3 -m pip install --upgrade typing-extensions + +Totality +-------- + +By default mypy ensures that a ``TypedDict`` object has all the specified +keys. This will be flagged as an error: + +.. code-block:: python + + # Error: 'year' missing + toy_story: Movie = {'name': 'Toy Story'} + +Sometimes you want to allow keys to be left out when creating a +``TypedDict`` object. You can provide the ``total=False`` argument to +``TypedDict(...)`` to achieve this: + +.. code-block:: python + + GuiOptions = TypedDict( + 'GuiOptions', {'language': str, 'color': str}, total=False) + options: GuiOptions = {} # Okay + options['language'] = 'en' + +You may need to use :py:meth:`~dict.get` to access items of a partial (non-total) +``TypedDict``, since indexing using ``[]`` could fail at runtime. +However, mypy still lets use ``[]`` with a partial ``TypedDict`` -- you +just need to be careful with it, as it could result in a :py:exc:`KeyError`. +Requiring :py:meth:`~dict.get` everywhere would be too cumbersome. (Note that you +are free to use :py:meth:`~dict.get` with total ``TypedDict``\s as well.) + +Keys that aren't required are shown with a ``?`` in error messages: + +.. code-block:: python + + # Revealed type is "TypedDict('GuiOptions', {'language'?: builtins.str, + # 'color'?: builtins.str})" + reveal_type(options) + +Totality also affects structural compatibility. You can't use a partial +``TypedDict`` when a total one is expected. Also, a total ``TypedDict`` is not +valid when a partial one is expected. 
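As a sketch of how totality affects compatibility, consider passing a partial ``TypedDict`` where a total one is expected. The ``RequiredGuiOptions`` type and ``apply_options`` function below are hypothetical, introduced only for this example, and the ``GuiOptions`` definition from above is repeated to keep the snippet self-contained:

.. code-block:: python

    from typing_extensions import TypedDict

    GuiOptions = TypedDict(
        'GuiOptions', {'language': str, 'color': str}, total=False)
    RequiredGuiOptions = TypedDict(
        'RequiredGuiOptions', {'language': str, 'color': str})

    def apply_options(opts: RequiredGuiOptions) -> None: ...

    partial_options: GuiOptions = {'language': 'en'}
    # Error (roughly): argument has incompatible type "GuiOptions";
    # expected "RequiredGuiOptions"
    apply_options(partial_options)

The reverse direction is rejected for the same reason: a total ``TypedDict`` promises that every key is present, which a function expecting a partial one may rely on not being the case when it deletes or skips items.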
+ +Supported operations +-------------------- + +``TypedDict`` objects support a subset of dictionary operations and methods. +You must use string literals as keys when calling most of the methods, +as otherwise mypy won't be able to check that the key is valid. List +of supported operations: + +* Anything included in :py:class:`~typing.Mapping`: + + * ``d[key]`` + * ``key in d`` + * ``len(d)`` + * ``for key in d`` (iteration) + * :py:meth:`d.get(key[, default]) ` + * :py:meth:`d.keys() ` + * :py:meth:`d.values() ` + * :py:meth:`d.items() ` + +* :py:meth:`d.copy() ` +* :py:meth:`d.setdefault(key, default) ` +* :py:meth:`d1.update(d2) ` +* :py:meth:`d.pop(key[, default]) ` (partial ``TypedDict``\s only) +* ``del d[key]`` (partial ``TypedDict``\s only) + +.. note:: + + :py:meth:`~dict.clear` and :py:meth:`~dict.popitem` are not supported since they are unsafe + -- they could delete required ``TypedDict`` items that are not visible to + mypy because of structural subtyping. + +Class-based syntax +------------------ + +An alternative, class-based syntax to define a ``TypedDict`` is supported +in Python 3.6 and later: + +.. code-block:: python + + from typing_extensions import TypedDict + + class Movie(TypedDict): + name: str + year: int + +The above definition is equivalent to the original ``Movie`` +definition. It doesn't actually define a real class. This syntax also +supports a form of inheritance -- subclasses can define additional +items. However, this is primarily a notational shortcut. Since mypy +uses structural compatibility with ``TypedDict``\s, inheritance is not +required for compatibility. Here is an example of inheritance: + +.. code-block:: python + + class Movie(TypedDict): + name: str + year: int + + class BookBasedMovie(Movie): + based_on: str + +Now ``BookBasedMovie`` has keys ``name``, ``year`` and ``based_on``. + +Mixing required and non-required items +-------------------------------------- + +In addition to allowing reuse across ``TypedDict`` types, inheritance also allows +you to mix required and non-required (using ``total=False``) items +in a single ``TypedDict``. Example: + +.. code-block:: python + + class MovieBase(TypedDict): + name: str + year: int + + class Movie(MovieBase, total=False): + based_on: str + +Now ``Movie`` has required keys ``name`` and ``year``, while ``based_on`` +can be left out when constructing an object. A ``TypedDict`` with a mix of required +and non-required keys, such as ``Movie`` above, will only be compatible with +another ``TypedDict`` if all required keys in the other ``TypedDict`` are required keys in the +first ``TypedDict``, and all non-required keys of the other ``TypedDict`` are also non-required keys +in the first ``TypedDict``. + +Unions of TypedDicts +-------------------- + +Since TypedDicts are really just regular dicts at runtime, it is not possible to +use ``isinstance`` checks to distinguish between different variants of a Union of +TypedDict in the same way you can with regular objects. + +Instead, you can use the :ref:`tagged union pattern `. The referenced +section of the docs has a full description with an example, but in short, you will +need to give each TypedDict the same key where each value has a unique +:ref:`Literal type `. Then, check that key to distinguish +between your TypedDicts. 
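A short sketch of that tagged union pattern (the event types and the ``process_event`` function here are invented for illustration): each ``TypedDict`` carries the same ``tag`` key with a distinct ``Literal`` value, and checking that key narrows the union.

.. code-block:: python

    from typing import Union
    from typing_extensions import Literal, TypedDict

    class NewJobEvent(TypedDict):
        tag: Literal['new-job']
        job_name: str

    class CancelJobEvent(TypedDict):
        tag: Literal['cancel-job']
        job_id: int

    Event = Union[NewJobEvent, CancelJobEvent]

    def process_event(event: Event) -> None:
        # Checking the shared literal-typed key narrows the union
        if event['tag'] == 'new-job':
            print(event['job_name'])  # event is narrowed to NewJobEvent
        else:
            print(event['job_id'])    # event is narrowed to CancelJobEvent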
diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py deleted file mode 100644 index 3b13c5d28820..000000000000 --- a/misc/actions_stubs.py +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import annotations - -import os -import shutil -from typing import Any - -try: - import click -except ImportError: - print("You need the module 'click'") - exit(1) - -base_path = os.getcwd() - -# I don't know how to set callables with different args -def apply_all( - func: Any, - directory: str, - extension: str, - to_extension: str = "", - exclude: tuple[str] = ("",), - recursive: bool = True, - debug: bool = False, -) -> None: - excluded = [x + extension for x in exclude] if exclude else [] - for p, d, files in os.walk(os.path.join(base_path, directory)): - for f in files: - if f in excluded: - continue - inner_path = os.path.join(p, f) - if not inner_path.endswith(extension): - continue - if to_extension: - new_path = f"{inner_path[:-len(extension)]}{to_extension}" - func(inner_path, new_path) - else: - func(inner_path) - if not recursive: - break - - -def confirm(resp: bool = False, **kargs) -> bool: - kargs["rest"] = "to this {f2}/*{e2}".format(**kargs) if kargs.get("f2") else "" - prompt = "{act} all files {rec}matching this expression {f1}/*{e1} {rest}".format(**kargs) - prompt.format(**kargs) - prompt = "{} [{}]|{}: ".format(prompt, "Y" if resp else "N", "n" if resp else "y") - while True: - ans = input(prompt).lower() - if not ans: - return resp - if ans not in ["y", "n"]: - print("Please, enter (y) or (n).") - continue - if ans == "y": - return True - else: - return False - - -actions = ["cp", "mv", "rm"] - - -@click.command(context_settings=dict(help_option_names=["-h", "--help"])) -@click.option( - "--action", "-a", type=click.Choice(actions), required=True, help="What do I have to do :-)" -) -@click.option("--dir", "-d", "directory", default="stubs", help="Directory to start search!") -@click.option( - "--ext", - "-e", - "extension", - default=".py", - help='Extension "from" will be applied the action. Default .py', -) -@click.option( - "--to", - "-t", - "to_extension", - default=".pyi", - help='Extension "to" will be applied the action if can. Default .pyi', -) -@click.option( - "--exclude", - "-x", - multiple=True, - default=("__init__",), - help="For every appear, will ignore this files. (can set multiples times)", -) -@click.option( - "--not-recursive", - "-n", - default=True, - is_flag=True, - help="Set if don't want to walk recursively.", -) -def main( - action: str, - directory: str, - extension: str, - to_extension: str, - exclude: tuple[str], - not_recursive: bool, -) -> None: - """ - This script helps to copy/move/remove files based on their extension. - - The three actions will ask you for confirmation. - - Examples (by default the script search in stubs directory): - - - Change extension of all stubs from .py to .pyi: - - python -a mv - - - Revert the previous action. - - python -a mv -e .pyi -t .py - - - If you want to ignore "awesome.py" files. - - python -a [cp|mv|rm] -x awesome - - - If you want to ignore "awesome.py" and "__init__.py" files. 
- - python -a [cp|mv|rm] -x awesome -x __init__ - - - If you want to remove all ".todo" files in "todo" directory, but not recursively: - - python -a rm -e .todo -d todo -r - - """ - if action not in actions: - print("Your action have to be one of this: {}".format(", ".join(actions))) - return - - rec = "[Recursively] " if not_recursive else "" - if not extension.startswith("."): - extension = f".{extension}" - if not to_extension.startswith("."): - to_extension = f".{to_extension}" - if directory.endswith("/"): - directory = directory[:-1] - if action == "cp": - if confirm(act="Copy", rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension): - apply_all(shutil.copy, directory, extension, to_extension, exclude, not_recursive) - elif action == "rm": - if confirm(act="Remove", rec=rec, f1=directory, e1=extension): - apply_all(os.remove, directory, extension, exclude=exclude, recursive=not_recursive) - elif action == "mv": - if confirm(act="Move", rec=rec, f1=directory, e1=extension, f2=directory, e2=to_extension): - apply_all(shutil.move, directory, extension, to_extension, exclude, not_recursive) - - -if __name__ == "__main__": - main() diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py index 25ae9713f6f1..8b805d8da0bc 100644 --- a/misc/analyze_cache.py +++ b/misc/analyze_cache.py @@ -30,7 +30,7 @@ def __init__( self.meta_size = meta_size @property - def total_size(self): + def total_size(self) -> int: return self.data_size + self.meta_size @@ -75,7 +75,7 @@ def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]: return (chunk for chunk in chunks if chunk[".class"] == name) -def report_counter(counter: Counter, amount: int | None = None) -> None: +def report_counter(counter: Counter[str], amount: int | None = None) -> None: for name, count in counter.most_common(amount): print(f" {count: <8} {name}") print() @@ -89,7 +89,7 @@ def compress(chunk: JsonDict) -> JsonDict: cache: dict[int, JsonDict] = {} counter = 0 - def helper(chunk: Any) -> Any: + def helper(chunk: JsonDict) -> JsonDict: nonlocal counter if not isinstance(chunk, dict): return chunk @@ -121,7 +121,7 @@ def helper(chunk: Any) -> Any: def decompress(chunk: JsonDict) -> JsonDict: cache: dict[int, JsonDict] = {} - def helper(chunk: Any) -> Any: + def helper(chunk: JsonDict) -> JsonDict: if not isinstance(chunk, dict): return chunk if ".id" in chunk: @@ -167,6 +167,7 @@ def main() -> None: if "build.*.json" in chunk.filename: build = chunk break + assert build is not None original = json.dumps(build.data, sort_keys=True) print(f"Size of build.data.json, in kilobytes: {len(original) / 1024:.3f}") diff --git a/misc/async_matrix.py b/misc/async_matrix.py index ba04fc390069..d4612dd81799 100644 --- a/misc/async_matrix.py +++ b/misc/async_matrix.py @@ -70,7 +70,7 @@ def plain_host_generator(func) -> Generator[str, None, None]: x = 0 f = func() try: - x = yield from f + x = yield from f # noqa: F841 finally: try: f.close() @@ -80,7 +80,7 @@ def plain_host_generator(func) -> Generator[str, None, None]: async def plain_host_coroutine(func) -> None: x = 0 - x = await func() + x = await func() # noqa: F841 @coroutine @@ -89,7 +89,7 @@ def decorated_host_generator(func) -> Generator[str, None, None]: x = 0 f = func() try: - x = yield from f + x = yield from f # noqa: F841 finally: try: f.close() @@ -100,13 +100,13 @@ def decorated_host_generator(func) -> Generator[str, None, None]: @coroutine async def decorated_host_coroutine(func) -> None: x = 0 - x = await func() + x = await func() # noqa: F841 # Main 
driver. -def main(): +def main() -> None: verbose = "-v" in sys.argv for host in [ plain_host_generator, diff --git a/misc/build-debug-python.sh b/misc/build-debug-python.sh index 2f32a46ce885..f652d6ad9937 100755 --- a/misc/build-debug-python.sh +++ b/misc/build-debug-python.sh @@ -1,7 +1,7 @@ #!/bin/bash -eux # Build a debug build of python, install it, and create a venv for it -# This is mainly intended for use in our travis builds but it can work +# This is mainly intended for use in our github actions builds but it can work # locally. (Though it unfortunately uses brew on OS X to deal with openssl # nonsense.) # Usage: build-debug-python.sh diff --git a/misc/cherry-pick-typeshed.py b/misc/cherry-pick-typeshed.py index 3cf826533a94..af08009c2a8f 100644 --- a/misc/cherry-pick-typeshed.py +++ b/misc/cherry-pick-typeshed.py @@ -37,7 +37,7 @@ def main() -> None: sys.exit(f"error: Invalid commit {commit!r}") if not os.path.exists("mypy") or not os.path.exists("mypyc"): - sys.exit(f"error: This script must be run at the mypy repository root directory") + sys.exit("error: This script must be run at the mypy repository root directory") with tempfile.TemporaryDirectory() as d: diff_file = os.path.join(d, "diff") diff --git a/misc/convert-cache.py b/misc/convert-cache.py index 92a313c6f2a0..e5da9c2650d5 100755 --- a/misc/convert-cache.py +++ b/misc/convert-cache.py @@ -14,7 +14,7 @@ import argparse -from mypy.metastore import FilesystemMetadataStore, SqliteMetadataStore +from mypy.metastore import FilesystemMetadataStore, MetadataStore, SqliteMetadataStore def main() -> None: @@ -37,7 +37,8 @@ def main() -> None: input_dir = args.input_dir output_dir = args.output_dir or input_dir if args.to_sqlite: - input, output = FilesystemMetadataStore(input_dir), SqliteMetadataStore(output_dir) + input: MetadataStore = FilesystemMetadataStore(input_dir) + output: MetadataStore = SqliteMetadataStore(output_dir) else: input, output = SqliteMetadataStore(input_dir), FilesystemMetadataStore(output_dir) diff --git a/misc/docker/Dockerfile b/misc/docker/Dockerfile new file mode 100644 index 000000000000..3327f9e38815 --- /dev/null +++ b/misc/docker/Dockerfile @@ -0,0 +1,12 @@ +FROM ubuntu:latest + +WORKDIR /mypy + +RUN apt-get update +RUN apt-get install -y python3 python3-pip clang + +COPY mypy-requirements.txt . +COPY test-requirements.txt . +COPY build-requirements.txt . + +RUN pip3 install -r test-requirements.txt diff --git a/misc/docker/README.md b/misc/docker/README.md new file mode 100644 index 000000000000..839f9761cb03 --- /dev/null +++ b/misc/docker/README.md @@ -0,0 +1,101 @@ +Running mypy and mypyc tests in a Docker container +================================================== + +This directory contains scripts for running mypy and mypyc tests in a +Linux Docker container. This allows running Linux tests on a different +operating system that supports Docker, or running tests in an +isolated, predictable environment on a Linux host operating system. + +Why use Docker? +--------------- + +Mypyc tests can be significantly faster in a Docker container than +running natively on macOS. + +Also, if it's inconvient to install the necessary dependencies on the +host operating system, or there are issues getting some tests to pass +on the host operating system, using a container can be an easy +workaround. + +Prerequisites +------------- + +First install Docker. 
On macOS, both Docker Desktop (proprietary, but +with a free of charge subscription for some use cases) and Colima (MIT +license) should work as runtimes. + +You may have to explicitly start the runtime first. Colima example +(replace '8' with the number of CPU cores you have): + +``` +$ colima start -c 8 + +``` + +How to run tests +---------------- + +You need to build the container with all necessary dependencies before +you can run tests: + +``` +$ python3 misc/docker/build.py +``` + +This creates a `mypy-test` Docker container that you can use to run +tests. + +You may need to run the script as root: + +``` +$ sudo python3 misc/docker/build.py +``` + +If you have a stale container which isn't up-to-date, use `--no-cache` +`--pull` to force rebuilding everything: + +``` +$ python3 misc/docker/build.py --no-cache --pull +``` + +Now you can run tests by using the `misc/docker/run.sh` script. Give +it the pytest command line you want to run as arguments. For example, +you can run mypyc tests like this: + +``` +$ misc/docker/run.sh pytest mypyc +``` + +You can also use `-k `, `-n0`, `-q`, etc. + +Again, you may need to run `run.sh` as root: + +``` +$ sudo misc/docker/run.sh pytest mypyc +``` + +You can also use `runtests.py` in the container. Example: + +``` +$ misc/docker/run.sh ./runtests.py self lint +``` + +Notes +----- + +File system changes within the container are not visible to the host +system. You can't use the container to format code using Black, for +example. + +On a mac, you may want to give additional CPU to the VM used to run +the container. The default allocation may be way too low (e.g. 2 CPU +cores). For example, use the `-c` option when starting the VM if you +use Colima: + +``` +$ colima start -c 8 +``` + +Giving access to all available CPUs to the Linux VM tends to provide +the best performance. This is not needed on a Linux host, since the +container is not run in a VM. diff --git a/misc/docker/build.py b/misc/docker/build.py new file mode 100644 index 000000000000..2103be3f110f --- /dev/null +++ b/misc/docker/build.py @@ -0,0 +1,46 @@ +"""Build a "mypy-test" Linux Docker container for running mypy/mypyc tests. + +This allows running Linux tests under a non-Linux operating system. Mypyc +tests can also run much faster under Linux that the host OS. + +NOTE: You may need to run this as root (using sudo). + +Run with "--no-cache" to force reinstallation of mypy dependencies. +Run with "--pull" to force update of the Linux (Ubuntu) base image. + +After you've built the container, use "run.sh" to run tests. Example: + + misc/docker/run.sh pytest mypyc/ +""" + +import argparse +import os +import subprocess +import sys + + +def main() -> None: + parser = argparse.ArgumentParser( + description="""Build a 'mypy-test' Docker container for running mypy/mypyc tests. 
You may + need to run this as root (using sudo).""" + ) + parser.add_argument("--no-cache", action="store_true", help="Force rebuilding") + parser.add_argument("--pull", action="store_true", help="Force pulling fresh Linux base image") + args = parser.parse_args() + + dockerdir = os.path.dirname(os.path.abspath(__file__)) + dockerfile = os.path.join(dockerdir, "Dockerfile") + rootdir = os.path.join(dockerdir, "..", "..") + + cmdline = ["docker", "build", "-t", "mypy-test", "-f", dockerfile] + if args.no_cache: + cmdline.append("--no-cache") + if args.pull: + cmdline.append("--pull") + cmdline.append(rootdir) + result = subprocess.run(cmdline) + sys.exit(result.returncode) + + +if __name__ == "__main__": + main() diff --git a/misc/docker/run-wrapper.sh b/misc/docker/run-wrapper.sh new file mode 100755 index 000000000000..77e77d99af34 --- /dev/null +++ b/misc/docker/run-wrapper.sh @@ -0,0 +1,13 @@ +#!/bin/bash +# Internal wrapper script used to run commands in a container + +# Copy all the files we need from the mypy repo directory shared with +# the host to a local directory. Accessing files using a shared +# directory on a mac can be *very* slow. +echo "copying files to the container..." +cp -R /repo/{mypy,mypyc,test-data,misc} . +cp /repo/{pytest.ini,conftest.py,runtests.py,pyproject.toml,setup.cfg} . +cp /repo/{mypy_self_check.ini,mypy_bootstrap.ini} . + +# Run the wrapped command +"$@" diff --git a/misc/docker/run.sh b/misc/docker/run.sh new file mode 100755 index 000000000000..c8fc0e510e8e --- /dev/null +++ b/misc/docker/run.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# Run mypy or mypyc tests in a Docker container that was built using misc/docker/build.py. +# +# Usage: misc/docker/run.sh ... +# +# For example, run mypyc tests like this: +# +# misc/docker/run.sh pytest mypyc +# +# NOTE: You may need to run this as root (using sudo). + +SCRIPT_DIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) +MYPY_DIR="$SCRIPT_DIR/../.." + +docker run -ti --rm -v "$MYPY_DIR:/repo" mypy-test /repo/misc/docker/run-wrapper.sh "$@" diff --git a/scripts/find_type.py b/misc/find_type.py similarity index 95% rename from scripts/find_type.py rename to misc/find_type.py index 7bded322e6e5..0031c72aea9f 100755 --- a/scripts/find_type.py +++ b/misc/find_type.py @@ -17,7 +17,7 @@ # " Convert to 0-based column offsets # let startcol = startcol - 1 # " Change this line to point to the find_type.py script. -# execute '!python3 /path/to/mypy/scripts/find_type.py % ' . startline . ' ' . startcol . ' ' . endline . ' ' . endcol . ' ' . mypycmd +# execute '!python3 /path/to/mypy/misc/find_type.py % ' . startline . ' ' . startcol . ' ' . endline . ' ' . endcol . ' ' . mypycmd # endfunction # vnoremap t :call RevealType() # @@ -68,7 +68,7 @@ def process_output(output: str, filename: str, start_line: int) -> tuple[str | N return None, True # finding no reveal_type is an error -def main(): +def main() -> None: filename, start_line_str, start_col_str, end_line_str, end_col_str, *mypy_and_args = sys.argv[ 1: ] diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index 7148b69259be..7fffba8a8507 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -72,12 +72,12 @@ def transform(self, node, results): # # "Compact" functions (e.g. "def foo(x, y): return max(x, y)") # have a different structure that isn't matched by PATTERN. 
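For concreteness, these are the two function shapes the comment above contrasts (a standalone illustration, not part of the patch):

```python
# "Compact" form: the body shares the line with the def, so there is no
# indented block for the fixer's PATTERN to match.
def compact(x, y): return max(x, y)


# Regular form: the body is an indented block, which is the shape the
# fixer is written to annotate.
def regular(x, y):
    return max(x, y)
```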
- - ## print('-'*60) - ## print(node) - ## for i, ch in enumerate(children): - ## print(i, repr(ch.prefix), repr(ch)) - + # + # print('-'*60) + # print(node) + # for i, ch in enumerate(children): + # print(i, repr(ch.prefix), repr(ch)) + # # Check if there's already an annotation. for ch in children: if ch.prefix.lstrip().startswith("# type:"): @@ -213,8 +213,7 @@ def has_return_exprs(self, node): results = {} if self.return_expr.match(node, results): return True - for child in node.children: - if child.type not in (syms.funcdef, syms.classdef): - if self.has_return_exprs(child): - return True - return False + return any( + child.type not in (syms.funcdef, syms.classdef) and self.has_return_exprs(child) + for child in node.children + ) diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py index 12dc37e2f05e..85239b6462b8 100755 --- a/misc/incremental_checker.py +++ b/misc/incremental_checker.py @@ -44,8 +44,8 @@ import textwrap import time from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter -from typing import Any, Dict, Tuple -from typing_extensions import TypeAlias as _TypeAlias +from typing import Any, Dict +from typing_extensions import Final, TypeAlias as _TypeAlias CACHE_PATH: Final = ".incremental_checker_cache.json" MYPY_REPO_URL: Final = "https://github.com/python/mypy.git" @@ -70,7 +70,7 @@ def execute(command: list[str], fail_on_error: bool = True) -> tuple[str, str, i proc = subprocess.Popen( " ".join(command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True ) - stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes] + stdout_bytes, stderr_bytes = proc.communicate() stdout, stderr = stdout_bytes.decode("utf-8"), stderr_bytes.decode("utf-8") if fail_on_error and proc.returncode != 0: print("EXECUTED COMMAND:", repr(command)) @@ -197,7 +197,9 @@ def stop_daemon() -> None: def load_cache(incremental_cache_path: str = CACHE_PATH) -> JsonDict: if os.path.exists(incremental_cache_path): with open(incremental_cache_path) as stream: - return json.load(stream) + cache = json.load(stream) + assert isinstance(cache, dict) + return cache else: return {} @@ -405,7 +407,7 @@ def main() -> None: parser.add_argument( "range_start", metavar="COMMIT_ID_OR_NUMBER", - help="the commit id to start from, or the number of " "commits to move back (see above)", + help="the commit id to start from, or the number of commits to move back (see above)", ) parser.add_argument( "-r", @@ -437,7 +439,7 @@ def main() -> None: "--branch", default=None, metavar="NAME", - help="check out and test a custom branch" "uses the default if not specified", + help="check out and test a custom branch uses the default if not specified", ) parser.add_argument("--sample", type=int, help="use a random sample of size SAMPLE") parser.add_argument("--seed", type=str, help="random seed") diff --git a/misc/macs.el b/misc/macs.el index 67d80aa575b0..f4cf6702b989 100644 --- a/misc/macs.el +++ b/misc/macs.el @@ -11,7 +11,7 @@ (thereline (line-number-at-pos there)) (therecol (save-excursion (goto-char there) (current-column)))) (shell-command - (format "cd ~/src/mypy; python3 ./scripts/find_type.py %s %s %s %s %s python3 -m mypy -i mypy" + (format "cd ~/src/mypy; python3 ./misc/find_type.py %s %s %s %s %s python3 -m mypy -i mypy" filename hereline herecol thereline therecol) ) ) diff --git a/misc/perf_checker.py b/misc/perf_checker.py index 52095f9fe052..20c313e61af9 100644 --- a/misc/perf_checker.py +++ b/misc/perf_checker.py @@ -8,7 +8,7 @@ import subprocess 
import textwrap import time -from typing import Callable, Tuple +from typing import Callable class Command: @@ -32,7 +32,7 @@ def execute(command: list[str]) -> None: proc = subprocess.Popen( " ".join(command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True ) - stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes] + stdout_bytes, stderr_bytes = proc.communicate() stdout, stderr = stdout_bytes.decode("utf-8"), stderr_bytes.decode("utf-8") if proc.returncode != 0: print("EXECUTED COMMAND:", repr(command)) diff --git a/misc/perf_compare.py b/misc/perf_compare.py new file mode 100644 index 000000000000..be05bb6ddc32 --- /dev/null +++ b/misc/perf_compare.py @@ -0,0 +1,146 @@ +"""Compare performance of mypyc-compiled mypy between one or more commits/branches. + +Simple usage: + + python misc/perf_compare.py my-branch master ... + +What this does: + + * Create a temp clone of the mypy repo for each target commit to measure + * Checkout a target commit in each of the clones + * Compile mypyc in each of the clones *in parallel* + * Create another temp clone of the mypy repo as the code to check + * Self check with each of the compiled mypys N times + * Report the average runtimes and relative performance + * Remove the temp clones +""" + +from __future__ import annotations + +import argparse +import glob +import os +import random +import shutil +import statistics +import subprocess +import sys +import threading +import time + + +def heading(s: str) -> None: + print() + print(f"=== {s} ===") + print() + + +def build_mypy(target_dir: str) -> None: + env = os.environ.copy() + env["CC"] = "clang" + env["MYPYC_OPT_LEVEL"] = "2" + cmd = [sys.executable, "setup.py", "--use-mypyc", "build_ext", "--inplace"] + subprocess.run(cmd, env=env, check=True, cwd=target_dir) + + +def clone(target_dir: str, commit: str | None) -> None: + heading(f"Cloning mypy to {target_dir}") + repo_dir = os.getcwd() + if os.path.isdir(target_dir): + print(f"{target_dir} exists: deleting") + shutil.rmtree(target_dir) + subprocess.run(["git", "clone", repo_dir, target_dir], check=True) + if commit: + subprocess.run(["git", "checkout", commit], check=True, cwd=target_dir) + + +def run_benchmark(compiled_dir: str, check_dir: str) -> float: + cache_dir = os.path.join(compiled_dir, ".mypy_cache") + if os.path.isdir(cache_dir): + shutil.rmtree(cache_dir) + env = os.environ.copy() + env["PYTHONPATH"] = os.path.abspath(compiled_dir) + abschk = os.path.abspath(check_dir) + cmd = [ + sys.executable, + "-m", + "mypy", + "--config-file", + os.path.join(abschk, "mypy_self_check.ini"), + ] + cmd += glob.glob(os.path.join(abschk, "mypy/*.py")) + cmd += glob.glob(os.path.join(abschk, "mypy/*/*.py")) + t0 = time.time() + # Ignore errors, since some commits being measured may generate additional errors. 
+ subprocess.run(cmd, cwd=compiled_dir, env=env) + return time.time() - t0 + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument("commit", nargs="+") + args = parser.parse_args() + commits = args.commit + num_runs = 16 + + if not (os.path.isdir(".git") and os.path.isdir("mypyc")): + sys.exit("error: Run this the mypy repo root") + + build_threads = [] + target_dirs = [] + for i, commit in enumerate(commits): + target_dir = f"mypy.{i}.tmpdir" + target_dirs.append(target_dir) + clone(target_dir, commit) + t = threading.Thread(target=lambda: build_mypy(target_dir)) + t.start() + build_threads.append(t) + + self_check_dir = "mypy.self.tmpdir" + clone(self_check_dir, commits[0]) + + heading("Compiling mypy") + print("(This will take a while...)") + + for t in build_threads: + t.join() + + print(f"Finished compiling mypy ({len(commits)} builds)") + + heading("Performing measurements") + + results: dict[str, list[float]] = {} + for n in range(num_runs): + if n == 0: + print("Warmup...") + else: + print(f"Run {n}/{num_runs - 1}...") + items = list(enumerate(commits)) + random.shuffle(items) + for i, commit in items: + tt = run_benchmark(target_dirs[i], self_check_dir) + # Don't record the first warm-up run + if n > 0: + print(f"{commit}: t={tt:.3f}s") + results.setdefault(commit, []).append(tt) + + print() + heading("Results") + first = -1.0 + for commit in commits: + tt = statistics.mean(results[commit]) + if first < 0: + delta = "0.0%" + first = tt + else: + d = (tt / first) - 1 + delta = f"{d:+.1%}" + print(f"{commit:<25} {tt:.3f}s ({delta})") + + shutil.rmtree(self_check_dir) + for target_dir in target_dirs: + shutil.rmtree(target_dir) + + +if __name__ == "__main__": + main() diff --git a/misc/proper_plugin.py b/misc/proper_plugin.py index 75f6417a3574..a8a8e80ef360 100644 --- a/misc/proper_plugin.py +++ b/misc/proper_plugin.py @@ -1,7 +1,8 @@ from __future__ import annotations -from typing import Callable, Type as typing_Type +from typing import Callable +from mypy.checker import TypeChecker from mypy.nodes import TypeInfo from mypy.plugin import FunctionContext, Plugin from mypy.subtypes import is_proper_subtype @@ -50,10 +51,8 @@ def isinstance_proper_hook(ctx: FunctionContext) -> Type: right = get_proper_type(ctx.arg_types[1][0]) for arg in ctx.arg_types[0]: if ( - is_improper_type(arg) - or isinstance(get_proper_type(arg), AnyType) - and is_dangerous_target(right) - ): + is_improper_type(arg) or isinstance(get_proper_type(arg), AnyType) + ) and is_dangerous_target(right): if is_special_target(right): return ctx.default_return_type ctx.api.fail( @@ -155,11 +154,13 @@ def proper_types_hook(ctx: FunctionContext) -> Type: def get_proper_type_instance(ctx: FunctionContext) -> Instance: - types = ctx.api.modules["mypy.types"] # type: ignore + checker = ctx.api + assert isinstance(checker, TypeChecker) + types = checker.modules["mypy.types"] proper_type_info = types.names["ProperType"] assert isinstance(proper_type_info.node, TypeInfo) return Instance(proper_type_info.node, []) -def plugin(version: str) -> typing_Type[ProperTypePlugin]: +def plugin(version: str) -> type[ProperTypePlugin]: return ProperTypePlugin diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 05202b989585..86b0fd774e0c 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -10,16 +10,21 @@ from __future__ import annotations import argparse +import functools import os +import re import shutil import subprocess import sys import tempfile import textwrap +from 
collections.abc import Mapping + +import requests def check_state() -> None: - if not os.path.isfile("README.md"): + if not os.path.isfile("pyproject.toml") or not os.path.isdir("mypy"): sys.exit("error: The current working directory must be the mypy repository root") out = subprocess.check_output(["git", "status", "-s", os.path.join("mypy", "typeshed")]) if out: @@ -30,27 +35,22 @@ def check_state() -> None: def update_typeshed(typeshed_dir: str, commit: str | None) -> str: """Update contents of local typeshed copy. + We maintain our own separate mypy_extensions stubs, since it's + treated specially by mypy and we make assumptions about what's there. + We don't sync mypy_extensions stubs here -- this is done manually. + Return the normalized typeshed commit hash. """ assert os.path.isdir(os.path.join(typeshed_dir, "stdlib")) - assert os.path.isdir(os.path.join(typeshed_dir, "stubs")) if commit: subprocess.run(["git", "checkout", commit], check=True, cwd=typeshed_dir) commit = git_head_commit(typeshed_dir) + stdlib_dir = os.path.join("mypy", "typeshed", "stdlib") # Remove existing stubs. shutil.rmtree(stdlib_dir) # Copy new stdlib stubs. shutil.copytree(os.path.join(typeshed_dir, "stdlib"), stdlib_dir) - # Copy mypy_extensions stubs. We don't want to use a stub package, since it's - # treated specially by mypy and we make assumptions about what's there. - stubs_dir = os.path.join("mypy", "typeshed", "stubs") - shutil.rmtree(stubs_dir) - os.makedirs(stubs_dir) - shutil.copytree( - os.path.join(typeshed_dir, "stubs", "mypy-extensions"), - os.path.join(stubs_dir, "mypy-extensions"), - ) shutil.copy(os.path.join(typeshed_dir, "LICENSE"), os.path.join("mypy", "typeshed")) return commit @@ -60,24 +60,96 @@ def git_head_commit(repo: str) -> str: return commit.strip() +@functools.cache +def get_github_api_headers() -> Mapping[str, str]: + headers = {"Accept": "application/vnd.github.v3+json"} + secret = os.environ.get("GITHUB_TOKEN") + if secret is not None: + headers["Authorization"] = ( + f"token {secret}" if secret.startswith("ghp") else f"Bearer {secret}" + ) + return headers + + +@functools.cache +def get_origin_owner() -> str: + output = subprocess.check_output(["git", "remote", "get-url", "origin"], text=True).strip() + match = re.match( + r"(git@github.com:|https://github.com/)(?P[^/]+)/(?P[^/\s]+)", output + ) + assert match is not None, f"Couldn't identify origin's owner: {output!r}" + assert ( + match.group("repo").removesuffix(".git") == "mypy" + ), f'Unexpected repo: {match.group("repo")!r}' + return match.group("owner") + + +def create_or_update_pull_request(*, title: str, body: str, branch_name: str) -> None: + fork_owner = get_origin_owner() + + with requests.post( + "https://api.github.com/repos/python/mypy/pulls", + json={ + "title": title, + "body": body, + "head": f"{fork_owner}:{branch_name}", + "base": "master", + }, + headers=get_github_api_headers(), + ) as response: + resp_json = response.json() + if response.status_code == 422 and any( + "A pull request already exists" in e.get("message", "") + for e in resp_json.get("errors", []) + ): + # Find the existing PR + with requests.get( + "https://api.github.com/repos/python/mypy/pulls", + params={"state": "open", "head": f"{fork_owner}:{branch_name}", "base": "master"}, + headers=get_github_api_headers(), + ) as response: + response.raise_for_status() + resp_json = response.json() + assert len(resp_json) >= 1 + pr_number = resp_json[0]["number"] + # Update the PR's title and body + with requests.patch( + 
f"https://api.github.com/repos/python/mypy/pulls/{pr_number}", + json={"title": title, "body": body}, + headers=get_github_api_headers(), + ) as response: + response.raise_for_status() + return + response.raise_for_status() + + def main() -> None: parser = argparse.ArgumentParser() parser.add_argument( "--commit", default=None, - help="Typeshed commit (default to latest master if using a repository clone)", + help="Typeshed commit (default to latest main if using a repository clone)", ) parser.add_argument( "--typeshed-dir", default=None, help="Location of typeshed (default to a temporary repository clone)", ) + parser.add_argument( + "--make-pr", + action="store_true", + help="Whether to make a PR with the changes (default to no)", + ) args = parser.parse_args() + check_state() - print("Update contents of mypy/typeshed from typeshed? [yN] ", end="") - answer = input() - if answer.lower() != "y": - sys.exit("Aborting") + + if args.make_pr: + if os.environ.get("GITHUB_TOKEN") is None: + raise ValueError("GITHUB_TOKEN environment variable must be set") + + branch_name = "mypybot/sync-typeshed" + subprocess.run(["git", "checkout", "-B", branch_name, "origin/master"], check=True) if not args.typeshed_dir: # Clone typeshed repo if no directory given. @@ -95,19 +167,37 @@ def main() -> None: # Create a commit message = textwrap.dedent( - """\ + f"""\ Sync typeshed Source commit: https://github.com/python/typeshed/commit/{commit} - """.format( - commit=commit - ) + """ ) subprocess.run(["git", "add", "--all", os.path.join("mypy", "typeshed")], check=True) subprocess.run(["git", "commit", "-m", message], check=True) print("Created typeshed sync commit.") + commits_to_cherry_pick = [ + "874afd970", # LiteralString reverts + "3a240111e", # sum reverts + "f968d6ce0", # ctypes reverts + ] + for commit in commits_to_cherry_pick: + subprocess.run(["git", "cherry-pick", commit], check=True) + print(f"Cherry-picked {commit}.") + + if args.make_pr: + subprocess.run(["git", "push", "--force", "origin", branch_name], check=True) + print("Pushed commit.") + + warning = "Note that you will need to close and re-open the PR in order to trigger CI." 
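As a side note, the remote-URL parsing in `get_origin_owner` above can be exercised on its own; a small sketch with made-up URLs (the named groups mirror the ones the function reads back):

```python
import re

# Accepts both the SSH and the HTTPS spelling of the "origin" remote.
REMOTE_RE = re.compile(
    r"(git@github.com:|https://github.com/)(?P<owner>[^/]+)/(?P<repo>[^/\s]+)"
)

for url in ("git@github.com:alice/mypy.git", "https://github.com/alice/mypy"):
    m = REMOTE_RE.match(url)
    assert m is not None
    print(m.group("owner"), m.group("repo").removesuffix(".git"))
# Both lines print: alice mypy
```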
+ + create_or_update_pull_request( + title="Sync typeshed", body=message + "\n" + warning, branch_name=branch_name + ) + print("Created PR.") + if __name__ == "__main__": main() diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py deleted file mode 100644 index 92d11866ef9d..000000000000 --- a/misc/test_case_to_actual.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import annotations - -import os -import os.path -import sys -from typing import Iterator - - -class Chunk: - def __init__(self, header_type: str, args: str) -> None: - self.header_type = header_type - self.args = args - self.lines: list[str] = [] - - -def is_header(line: str) -> bool: - return line.startswith("[") and line.endswith("]") - - -def normalize(lines: Iterator[str]) -> Iterator[str]: - return (line.rstrip() for line in lines) - - -def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]: - current_chunk: Chunk = None - for line in normalize(lines): - if is_header(line): - if current_chunk is not None: - yield current_chunk - parts = line[1:-1].split(" ", 1) - args = parts[1] if len(parts) > 1 else "" - current_chunk = Chunk(parts[0], args) - else: - current_chunk.lines.append(line) - if current_chunk is not None: - yield current_chunk - - -def write_out(filename: str, lines: list[str]) -> None: - os.makedirs(os.path.dirname(filename), exist_ok=True) - with open(filename, "w") as stream: - stream.write("\n".join(lines)) - - -def write_tree(root: str, chunks: Iterator[Chunk]) -> None: - init = next(chunks) - assert init.header_type == "case" - - root = os.path.join(root, init.args) - write_out(os.path.join(root, "main.py"), init.lines) - - for chunk in chunks: - if chunk.header_type == "file" and chunk.args.endswith(".py"): - write_out(os.path.join(root, chunk.args), chunk.lines) - - -def help() -> None: - print("Usage: python misc/test_case_to_actual.py test_file.txt root_path") - - -def main() -> None: - if len(sys.argv) != 3: - help() - return - - test_file_path, root_path = sys.argv[1], sys.argv[2] - with open(test_file_path) as stream: - chunks = produce_chunks(iter(stream)) - write_tree(root_path, chunks) - - -if __name__ == "__main__": - main() diff --git a/misc/touch_checker.py b/misc/touch_checker.py deleted file mode 100644 index 2adcacc3af9a..000000000000 --- a/misc/touch_checker.py +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import annotations - -import glob -import os -import shutil -import statistics -import subprocess -import sys -import textwrap -import time -from typing import Callable, Tuple - - -def print_offset(text: str, indent_length: int = 4) -> None: - print() - print(textwrap.indent(text, " " * indent_length)) - print() - - -def delete_folder(folder_path: str) -> None: - if os.path.exists(folder_path): - shutil.rmtree(folder_path) - - -def execute(command: list[str]) -> None: - proc = subprocess.Popen( - " ".join(command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True - ) - stdout_bytes, stderr_bytes = proc.communicate() # type: Tuple[bytes, bytes] - stdout, stderr = stdout_bytes.decode("utf-8"), stderr_bytes.decode("utf-8") - if proc.returncode != 0: - print("EXECUTED COMMAND:", repr(command)) - print("RETURN CODE:", proc.returncode) - print() - print("STDOUT:") - print_offset(stdout) - print("STDERR:") - print_offset(stderr) - print() - - -Command = Callable[[], None] - - -def test(setup: Command, command: Command, teardown: Command) -> float: - setup() - start = time.time() - command() - end = time.time() - start - teardown() 
- return end - - -def make_touch_wrappers(filename: str) -> tuple[Command, Command]: - def setup() -> None: - execute(["touch", filename]) - - def teardown() -> None: - pass - - return setup, teardown - - -def make_change_wrappers(filename: str) -> tuple[Command, Command]: - copy: str | None = None - - def setup() -> None: - nonlocal copy - with open(filename) as stream: - copy = stream.read() - with open(filename, "a") as stream: - stream.write("\n\nfoo = 3") - - def teardown() -> None: - assert copy is not None - with open(filename, "w") as stream: - stream.write(copy) - - # Re-run to reset cache - execute(["python3", "-m", "mypy", "-i", "mypy"]), - - return setup, teardown - - -def main() -> None: - if len(sys.argv) != 2 or sys.argv[1] not in {"touch", "change"}: - print("First argument should be 'touch' or 'change'") - return - - if sys.argv[1] == "touch": - make_wrappers = make_touch_wrappers - verb = "Touching" - elif sys.argv[1] == "change": - make_wrappers = make_change_wrappers - verb = "Changing" - else: - raise AssertionError() - - print("Setting up...") - - baseline = test(lambda: None, lambda: execute(["python3", "-m", "mypy", "mypy"]), lambda: None) - print(f"Baseline: {baseline}") - - cold = test( - lambda: delete_folder(".mypy_cache"), - lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]), - lambda: None, - ) - print(f"Cold cache: {cold}") - - warm = test( - lambda: None, lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]), lambda: None - ) - print(f"Warm cache: {warm}") - - print() - - deltas = [] - for filename in glob.iglob("mypy/**/*.py", recursive=True): - print(f"{verb} {filename}") - - setup, teardown = make_wrappers(filename) - delta = test(setup, lambda: execute(["python3", "-m", "mypy", "-i", "mypy"]), teardown) - print(f" Time: {delta}") - deltas.append(delta) - print() - - print("Initial:") - print(f" Baseline: {baseline}") - print(f" Cold cache: {cold}") - print(f" Warm cache: {warm}") - print() - print("Aggregate:") - print(f" Times: {deltas}") - print(f" Mean: {statistics.mean(deltas)}") - print(f" Median: {statistics.median(deltas)}") - print(f" Stdev: {statistics.stdev(deltas)}") - print(f" Min: {min(deltas)}") - print(f" Max: {max(deltas)}") - print(f" Total: {sum(deltas)}") - print() - - -if __name__ == "__main__": - main() diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index 4d18b7d78ade..9d8827c5e46c 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -29,15 +29,32 @@ def is_whl_or_tar(name: str) -> bool: return name.endswith(".tar.gz") or name.endswith(".whl") +def item_ok_for_pypi(name: str) -> bool: + if not is_whl_or_tar(name): + return False + + if name.endswith(".tar.gz"): + name = name[:-7] + if name.endswith(".whl"): + name = name[:-4] + + if name.endswith("wasm32"): + return False + + return True + + def get_release_for_tag(tag: str) -> dict[str, Any]: with urlopen(f"{BASE}/{REPO}/releases/tags/{tag}") as f: data = json.load(f) + assert isinstance(data, dict) assert data["tag_name"] == tag return data def download_asset(asset: dict[str, Any], dst: Path) -> Path: name = asset["name"] + assert isinstance(name, str) download_url = asset["browser_download_url"] assert is_whl_or_tar(name) with urlopen(download_url) as src_file: @@ -47,7 +64,7 @@ def download_asset(asset: dict[str, Any], dst: Path) -> Path: def download_all_release_assets(release: dict[str, Any], dst: Path) -> None: - print(f"Downloading assets...") + print("Downloading assets...") with ThreadPoolExecutor() as e: for asset in e.map(lambda asset: 
download_asset(asset, dst), release["assets"]): print(f"Downloaded {asset}") @@ -68,12 +85,12 @@ def check_sdist(dist: Path, version: str) -> None: hashless_version = match.group(1) if match else version assert ( - f"'{hashless_version}'" in version_py_contents + f'"{hashless_version}"' in version_py_contents ), "Version does not match version.py in sdist" def spot_check_dist(dist: Path, version: str) -> None: - items = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + items = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] assert len(items) > 10 assert all(version in item.name for item in items) assert any(item.name.endswith("py3-none-any.whl") for item in items) @@ -91,7 +108,7 @@ def tmp_twine() -> Iterator[Path]: def upload_dist(dist: Path, dry_run: bool = True) -> None: with tmp_twine() as twine: - files = [item for item in dist.iterdir() if is_whl_or_tar(item.name)] + files = [item for item in dist.iterdir() if item_ok_for_pypi(item.name)] cmd: list[Any] = [twine, "upload"] cmd += files if dry_run: @@ -102,7 +119,7 @@ def upload_dist(dist: Path, dry_run: bool = True) -> None: def upload_to_pypi(version: str, dry_run: bool = True) -> None: - assert re.match(r"v?0\.[0-9]{3}(\+\S+)?$", version) + assert re.match(r"v?[1-9]\.[0-9]+\.[0-9](\+\S+)?$", version) if "dev" in version: assert dry_run, "Must use --dry-run with dev versions of mypy" if version.startswith("v"): diff --git a/misc/variadics.py b/misc/variadics.py deleted file mode 100644 index c54e3fd8e30e..000000000000 --- a/misc/variadics.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Example of code generation approach to variadics. - -See https://github.com/python/typing/issues/193#issuecomment-236383893 -""" - -from __future__ import annotations - -LIMIT = 5 -BOUND = "object" - - -def prelude(limit: int, bound: str) -> None: - print("from typing import Callable, Iterable, Iterator, Tuple, TypeVar, overload") - print(f"Ts = TypeVar('Ts', bound={bound})") - print("R = TypeVar('R')") - for i in range(LIMIT): - print("T{i} = TypeVar('T{i}', bound={bound})".format(i=i + 1, bound=bound)) - - -def expand_template( - template: str, arg_template: str = "arg{i}: {Ts}", lower: int = 0, limit: int = LIMIT -) -> None: - print() - for i in range(lower, limit): - tvs = ", ".join(f"T{j+1}" for j in range(i)) - args = ", ".join(arg_template.format(i=j + 1, Ts=f"T{j+1}") for j in range(i)) - print("@overload") - s = template.format(Ts=tvs, argsTs=args) - s = s.replace("Tuple[]", "Tuple[()]") - print(s) - args_l = [arg_template.format(i=j + 1, Ts="Ts") for j in range(limit)] - args_l.append("*" + (arg_template.format(i="s", Ts="Ts"))) - args = ", ".join(args_l) - s = template.format(Ts="Ts, ...", argsTs=args) - s = s.replace("Callable[[Ts, ...]", "Callable[...") - print("@overload") - print(s) - - -def main(): - prelude(LIMIT, BOUND) - - # map() - expand_template("def map(func: Callable[[{Ts}], R], {argsTs}) -> R: ...", lower=1) - # zip() - expand_template("def zip({argsTs}) -> Tuple[{Ts}]: ...") - - # Naomi's examples - expand_template("def my_zip({argsTs}) -> Iterator[Tuple[{Ts}]]: ...", "arg{i}: Iterable[{Ts}]") - expand_template("def make_check({argsTs}) -> Callable[[{Ts}], bool]: ...") - expand_template( - "def my_map(f: Callable[[{Ts}], R], {argsTs}) -> Iterator[R]: ...", - "arg{i}: Iterable[{Ts}]", - ) - - -main() diff --git a/mypy-requirements.txt b/mypy-requirements.txt index 1c372294383d..9a55446eb05a 100644 --- a/mypy-requirements.txt +++ b/mypy-requirements.txt @@ -1,4 +1,5 @@ +# NOTE: this needs to be kept in 
sync with the "requires" list in pyproject.toml typing_extensions>=3.10 -mypy_extensions>=0.4.3 +mypy_extensions>=1.0.0 typed_ast>=1.4.0,<2; python_version<'3.8' tomli>=1.1.0; python_version<'3.11' diff --git a/mypy/api.py b/mypy/api.py index 18b92fe82064..589bfbbfa1a7 100644 --- a/mypy/api.py +++ b/mypy/api.py @@ -47,7 +47,7 @@ import sys from io import StringIO -from typing import Callable, TextIO +from typing import Callable, TextIO, cast def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int]: @@ -59,7 +59,7 @@ def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int] main_wrapper(stdout, stderr) exit_status = 0 except SystemExit as system_exit: - exit_status = system_exit.code + exit_status = cast(int, system_exit.code) return stdout.getvalue(), stderr.getvalue(), exit_status diff --git a/mypy/applytype.py b/mypy/applytype.py index b66e148ee0ab..a81ed3cd1f16 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -3,21 +3,24 @@ from typing import Callable, Sequence import mypy.subtypes -from mypy.expandtype import expand_type -from mypy.nodes import Context +from mypy.expandtype import expand_type, expand_unpack_with_variables +from mypy.nodes import ARG_STAR, Context from mypy.types import ( AnyType, CallableType, Parameters, ParamSpecType, PartialType, + TupleType, Type, TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, + UnpackType, get_proper_type, ) +from mypy.typevartuples import find_unpack_in_list, replace_starargs def get_target_type( @@ -72,6 +75,7 @@ def apply_generic_arguments( report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], context: Context, skip_unsatisfied: bool = False, + allow_erased_callables: bool = False, ) -> CallableType: """Apply generic type arguments to a callable type. @@ -110,11 +114,63 @@ def apply_generic_arguments( callable = callable.expand_param_spec(nt) # Apply arguments to argument types. - arg_types = [expand_type(at, id_to_type) for at in callable.arg_types] + var_arg = callable.var_arg() + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + star_index = callable.arg_kinds.index(ARG_STAR) + callable = callable.copy_modified( + arg_types=( + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[:star_index] + ] + + [callable.arg_types[star_index]] + + [ + expand_type(at, id_to_type, allow_erased_callables) + for at in callable.arg_types[star_index + 1 :] + ] + ) + ) + + unpacked_type = get_proper_type(var_arg.typ.type) + if isinstance(unpacked_type, TupleType): + # Assuming for now that because we convert prefixes to positional arguments, + # the first argument is always an unpack. + expanded_tuple = expand_type(unpacked_type, id_to_type) + if isinstance(expanded_tuple, TupleType): + # TODO: handle the case where the tuple has an unpack. This will + # hit an assert below. + expanded_unpack = find_unpack_in_list(expanded_tuple.items) + if expanded_unpack is not None: + callable = callable.copy_modified( + arg_types=( + callable.arg_types[:star_index] + + [expanded_tuple] + + callable.arg_types[star_index + 1 :] + ) + ) + else: + callable = replace_starargs(callable, expanded_tuple.items) + else: + # TODO: handle the case for if we get a variable length tuple. 
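Stepping back from the internals for a moment: the unpack handling in this hunk exists to support PEP 646 variadic generics, which mypy still treated as experimental at this point. A minimal user-level sketch (independent of this patch, using typing_extensions):

```python
from typing_extensions import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")


def first_arg(*args: Unpack[Ts]) -> object:
    # At a call site mypy specializes Ts to a concrete tuple of types,
    # e.g. (int, str, float) for the call below.
    return args[0] if args else None


first_arg(1, "two", 3.0)
```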
+ assert False, f"mypy bug: unimplemented case, {expanded_tuple}" + elif isinstance(unpacked_type, TypeVarTupleType): + expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type) + assert isinstance(expanded_tvt, list) + for t in expanded_tvt: + assert not isinstance(t, UnpackType) + callable = replace_starargs(callable, expanded_tvt) + else: + assert False, "mypy bug: unhandled case applying unpack" + else: + callable = callable.copy_modified( + arg_types=[ + expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types + ] + ) # Apply arguments to TypeGuard if any. if callable.type_guard is not None: - type_guard = expand_type(callable.type_guard, id_to_type) + type_guard = expand_type(callable.type_guard, id_to_type, allow_erased_callables) else: type_guard = None @@ -122,8 +178,7 @@ def apply_generic_arguments( remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( - arg_types=arg_types, - ret_type=expand_type(callable.ret_type, id_to_type), + ret_type=expand_type(callable.ret_type, id_to_type, allow_erased_callables), variables=remaining_tvars, type_guard=type_guard, ) diff --git a/mypy/binder.py b/mypy/binder.py index 8e49f87c2506..d822aecec2f3 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -8,11 +8,21 @@ from mypy.erasetype import remove_instance_last_known_values from mypy.join import join_simple from mypy.literals import Key, literal, literal_hash, subkeys -from mypy.nodes import AssignmentExpr, Expression, IndexExpr, MemberExpr, NameExpr, RefExpr, Var +from mypy.nodes import Expression, IndexExpr, MemberExpr, NameExpr, RefExpr, TypeInfo, Var from mypy.subtypes import is_same_type, is_subtype -from mypy.types import AnyType, NoneType, PartialType, Type, TypeOfAny, UnionType, get_proper_type +from mypy.types import ( + AnyType, + NoneType, + PartialType, + Type, + TypeOfAny, + TypeType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars_with_any -BindableExpression: _TypeAlias = Union[IndexExpr, MemberExpr, AssignmentExpr, NameExpr] +BindableExpression: _TypeAlias = Union[IndexExpr, MemberExpr, NameExpr] class Frame: @@ -133,7 +143,7 @@ def _get(self, key: Key, index: int = -1) -> Type | None: return None def put(self, expr: Expression, typ: Type) -> None: - if not isinstance(expr, (IndexExpr, MemberExpr, AssignmentExpr, NameExpr)): + if not isinstance(expr, (IndexExpr, MemberExpr, NameExpr)): return if not literal(expr): return @@ -439,8 +449,11 @@ def top_frame_context(self) -> Iterator[Frame]: def get_declaration(expr: BindableExpression) -> Type | None: - if isinstance(expr, RefExpr) and isinstance(expr.node, Var): - type = expr.node.type - if not isinstance(get_proper_type(type), PartialType): - return type + if isinstance(expr, RefExpr): + if isinstance(expr.node, Var): + type = expr.node.type + if not isinstance(get_proper_type(type), PartialType): + return type + elif isinstance(expr.node, TypeInfo): + return TypeType(fill_typevars_with_any(expr.node)) return None diff --git a/mypy/build.py b/mypy/build.py index 1d7ab25c989e..a4817d1866c7 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -12,6 +12,7 @@ from __future__ import annotations +import collections import contextlib import errno import gc @@ -35,7 +36,6 @@ Mapping, NamedTuple, NoReturn, - Optional, Sequence, TextIO, TypeVar, @@ -48,7 +48,9 @@ from mypy.checker import TypeChecker from mypy.errors import CompileError, ErrorInfo, Errors, report_internal_error from mypy.indirection import TypeIndirectionVisitor -from 
mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable +from mypy.messages import MessageBuilder +from mypy.nodes import Import, ImportAll, ImportBase, ImportFrom, MypyFile, SymbolTable, TypeInfo +from mypy.partially_defined import PossiblyUndefinedVariableVisitor from mypy.semanal import SemanticAnalyzer from mypy.semanal_pass1 import SemanticAnalyzerPreAnalysis from mypy.util import ( @@ -91,9 +93,14 @@ from mypy.plugins.default import DefaultPlugin from mypy.renaming import LimitedVariableRenameVisitor, VariableRenameVisitor from mypy.stats import dump_type_stats -from mypy.stubinfo import is_legacy_bundled_package, legacy_bundled_packages +from mypy.stubinfo import ( + is_legacy_bundled_package, + legacy_bundled_packages, + non_bundled_packages, + stub_package_name, +) from mypy.types import Type -from mypy.typestate import TypeState, reset_global_state +from mypy.typestate import reset_global_state, type_state from mypy.version import __version__ # Switch to True to produce debug output related to fine-grained incremental @@ -231,14 +238,13 @@ def _build( errors = Errors( options.show_error_context, options.show_column_numbers, - options.show_error_codes, + options.hide_error_codes, options.pretty, options.show_error_end, lambda path: read_py_file(path, cached_read), options.show_absolute_path, - options.enabled_error_codes, - options.disabled_error_codes, options.many_errors_threshold, + options, ) plugin, snapshot = load_plugins(options, errors, stdout, extra_plugins) @@ -270,9 +276,11 @@ def _build( try: graph = dispatch(sources, manager, stdout) if not options.fine_grained_incremental: - TypeState.reset_all_subtype_caches() + type_state.reset_all_subtype_caches() if options.timing_stats is not None: dump_timing_stats(options.timing_stats, graph) + if options.line_checking_stats is not None: + dump_line_checking_stats(options.line_checking_stats, graph) return BuildResult(manager, graph) finally: t0 = time.time() @@ -422,7 +430,7 @@ def plugin_error(message: str) -> NoReturn: errors.raise_error(use_stdout=False) custom_plugins: list[Plugin] = [] - errors.set_file(options.config_file, None) + errors.set_file(options.config_file, None, options) for plugin_path in options.plugins: func_name = "plugin" plugin_dir: str | None = None @@ -668,16 +676,10 @@ def __init__( raise CompileError( [f"Failed to find builtin module {module}, perhaps typeshed is broken?"] ) - if is_typeshed_file(path): - continue - if is_stub_package_file(path): + if is_typeshed_file(options.abs_custom_typeshed_dir, path) or is_stub_package_file( + path + ): continue - if options.custom_typeshed_dir is not None: - # Check if module lives under custom_typeshed_dir subtree - custom_typeshed_dir = os.path.abspath(options.custom_typeshed_dir) - path = os.path.abspath(path) - if os.path.commonpath((path, custom_typeshed_dir)) == custom_typeshed_dir: - continue raise CompileError( [ @@ -779,7 +781,7 @@ def correct_rel_imp(imp: ImportFrom | ImportAll) -> str: new_id = file_id + "." 
+ imp.id if imp.id else file_id if not new_id: - self.errors.set_file(file.path, file.name) + self.errors.set_file(file.path, file.name, self.options) self.errors.report( imp.line, 0, "No parent module -- cannot perform relative import", blocker=True ) @@ -990,7 +992,7 @@ def write_deps_cache( error = True if error: - manager.errors.set_file(_cache_dir_prefix(manager.options), None) + manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options) manager.errors.report(0, 0, "Error writing fine-grained dependencies cache", blocker=True) @@ -1054,7 +1056,7 @@ def generate_deps_for_cache(manager: BuildManager, graph: Graph) -> dict[str, di def write_plugins_snapshot(manager: BuildManager) -> None: """Write snapshot of versions and hashes of currently active plugins.""" if not manager.metastore.write(PLUGIN_SNAPSHOT_FILE, json.dumps(manager.plugins_snapshot)): - manager.errors.set_file(_cache_dir_prefix(manager.options), None) + manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options) manager.errors.report(0, 0, "Error writing plugins snapshot", blocker=True) @@ -1125,6 +1127,7 @@ def read_deps_cache(manager: BuildManager, graph: Graph) -> dict[str, FgDepMeta] return None module_deps_metas = deps_meta["deps_meta"] + assert isinstance(module_deps_metas, dict) if not manager.options.skip_cache_mtime_checks: for id, meta in module_deps_metas.items(): try: @@ -1157,7 +1160,7 @@ def _load_json_file( result = json.loads(data) manager.add_stats(data_json_load_time=time.time() - t1) except json.JSONDecodeError: - manager.errors.set_file(file, None) + manager.errors.set_file(file, None, manager.options) manager.errors.report( -1, -1, @@ -1169,6 +1172,7 @@ def _load_json_file( ) return None else: + assert isinstance(result, dict) return result @@ -1292,11 +1296,15 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> CacheMeta | No ) # Don't check for path match, that is dealt with in validate_meta(). + # + # TODO: these `type: ignore`s wouldn't be necessary + # if the type annotations for CacheMeta were more accurate + # (all of these attributes can be `None`) if ( m.id != id - or m.mtime is None - or m.size is None - or m.dependencies is None + or m.mtime is None # type: ignore[redundant-expr] + or m.size is None # type: ignore[redundant-expr] + or m.dependencies is None # type: ignore[redundant-expr] or m.data_mtime is None ): manager.log(f"Metadata abandoned for {id}: attributes are missing") @@ -1396,8 +1404,8 @@ def validate_meta( st = manager.get_stat(path) except OSError: return None - if not (stat.S_ISREG(st.st_mode) or stat.S_ISDIR(st.st_mode)): - manager.log(f"Metadata abandoned for {id}: file {path} does not exist") + if not stat.S_ISDIR(st.st_mode) and not stat.S_ISREG(st.st_mode): + manager.log(f"Metadata abandoned for {id}: file or directory {path} does not exist") return None manager.add_stats(validate_stat_time=time.time() - t0) @@ -1884,6 +1892,10 @@ class State: # Cumulative time spent on this file, in microseconds (for profiling stats) time_spent_us: int = 0 + # Per-line type-checking time (cumulative time spent type-checking expressions + # on a given source code line). 
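In other words, the new attribute is a per-line accumulator of nanoseconds spent checking expressions. A small sketch of the bookkeeping, using the report layout that `dump_line_checking_stats` (added later in this patch) writes, with invented sample numbers:

```python
import collections

# Mirrors State.per_line_checking_time_ns: line number -> nanoseconds.
per_line_checking_time_ns = collections.defaultdict(int)


def note_expression_checked(line: int, elapsed_ns: int) -> None:
    per_line_checking_time_ns[line] += elapsed_ns


note_expression_checked(10, 1_500)
note_expression_checked(10, 2_500)
note_expression_checked(42, 800)

for line in sorted(per_line_checking_time_ns):
    # Same "<line> <microseconds>" layout as dump_line_checking_stats.
    print(f"{line:>5} {per_line_checking_time_ns[line] / 1000:8.1f}")
```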
+ per_line_checking_time_ns: dict[int, int] + def __init__( self, id: str | None, @@ -1935,6 +1947,8 @@ def __init__( raise if follow_imports == "silent": self.ignore_all = True + elif path and is_silent_import_module(manager, path) and not root_source: + self.ignore_all = True self.path = path if path: self.abspath = os.path.abspath(path) @@ -1949,6 +1963,7 @@ def __init__( source = "" self.source = source self.add_ancestors() + self.per_line_checking_time_ns = collections.defaultdict(int) t0 = time.time() self.meta = validate_meta(self.meta, self.id, self.path, self.ignore_all, manager) self.manager.add_stats(validate_meta_time=time.time() - t0) @@ -2206,7 +2221,7 @@ def parse_inline_configuration(self, source: str) -> None: if flags: changes, config_errors = parse_mypy_comments(flags, self.options) self.options = self.options.apply_changes(changes) - self.manager.errors.set_file(self.xpath, self.id) + self.manager.errors.set_file(self.xpath, self.id, self.options) for lineno, error in config_errors: self.manager.errors.report(lineno, 0, error) @@ -2313,6 +2328,7 @@ def type_checker(self) -> TypeChecker: self.tree, self.xpath, manager.plugin, + self.per_line_checking_time_ns, ) return self._type_checker @@ -2331,6 +2347,25 @@ def type_check_second_pass(self) -> bool: self.time_spent_us += time_spent_us(t0) return result + def detect_possibly_undefined_vars(self) -> None: + assert self.tree is not None, "Internal error: method must be called on parsed file only" + if self.tree.is_stub: + # We skip stub files because they aren't actually executed. + return + manager = self.manager + manager.errors.set_file(self.xpath, self.tree.fullname, options=self.options) + if manager.errors.is_error_code_enabled( + codes.POSSIBLY_UNDEFINED + ) or manager.errors.is_error_code_enabled(codes.USED_BEFORE_DEF): + self.tree.accept( + PossiblyUndefinedVariableVisitor( + MessageBuilder(manager.errors, manager.modules), + self.type_map(), + self.options, + self.tree.names, + ) + ) + def finish_passes(self) -> None: assert self.tree is not None, "Internal error: method must be called on parsed file only" manager = self.manager @@ -2345,7 +2380,24 @@ def finish_passes(self) -> None: # We should always patch indirect dependencies, even in full (non-incremental) builds, # because the cache still may be written, and it must be correct. - self._patch_indirect_dependencies(self.type_checker().module_refs, self.type_map()) + # TODO: find a more robust way to traverse *all* relevant types? + expr_types = set(self.type_map().values()) + symbol_types = set() + for _, sym, _ in self.tree.local_definitions(): + if sym.type is not None: + symbol_types.add(sym.type) + if isinstance(sym.node, TypeInfo): + # TypeInfo symbols have some extra relevant types. 
+ symbol_types.update(sym.node.bases) + if sym.node.metaclass_type: + symbol_types.add(sym.node.metaclass_type) + if sym.node.typeddict_type: + symbol_types.add(sym.node.typeddict_type) + if sym.node.tuple_type: + symbol_types.add(sym.node.tuple_type) + self._patch_indirect_dependencies( + self.type_checker().module_refs, expr_types | symbol_types + ) if self.options.dump_inference_stats: dump_type_stats( @@ -2368,10 +2420,7 @@ def free_state(self) -> None: self._type_checker.reset() self._type_checker = None - def _patch_indirect_dependencies( - self, module_refs: set[str], type_map: dict[Expression, Type] - ) -> None: - types = set(type_map.values()) + def _patch_indirect_dependencies(self, module_refs: set[str], types: set[Type]) -> None: assert None not in types valid = self.valid_references() @@ -2413,7 +2462,7 @@ def update_fine_grained_deps(self, deps: dict[str, set[str]]) -> None: from mypy.server.deps import merge_dependencies # Lazy import to speed up startup merge_dependencies(self.compute_fine_grained_deps(), deps) - TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def valid_references(self) -> set[str]: assert self.ancestors is not None @@ -2433,6 +2482,12 @@ def write_cache(self) -> None: or self.options.cache_dir == os.devnull or self.options.fine_grained_incremental ): + if self.options.debug_serialize: + try: + self.tree.serialize() + except Exception: + print(f"Error serializing {self.id}", file=self.manager.stdout) + raise # Propagate to display traceback return is_errors = self.transitive_error if is_errors: @@ -2490,7 +2545,8 @@ def verify_dependencies(self, suppressed_only: bool = False) -> None: line = self.dep_line_map.get(dep, 1) try: if dep in self.ancestors: - state, ancestor = None, self # type: (Optional[State], Optional[State]) + state: State | None = None + ancestor: State | None = self else: state, ancestor = self, None # Called just for its side effects of producing diagnostics. @@ -2575,11 +2631,11 @@ def find_module_and_diagnose( if ( root_source # Honor top-level modules or ( - not result.endswith(".py") # Stubs are always normal - and not options.follow_imports_for_stubs - ) # except when they aren't - or id in mypy.semanal_main.core_modules - ): # core is always normal + result.endswith(".pyi") # Stubs are always normal + and not options.follow_imports_for_stubs # except when they aren't + ) + or id in mypy.semanal_main.core_modules # core is always normal + ): follow_imports = "normal" if skip_diagnose: pass @@ -2596,11 +2652,8 @@ def find_module_and_diagnose( else: skipping_module(manager, caller_line, caller_state, id, result) raise ModuleNotFound - if not manager.options.no_silence_site_packages: - for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path: - if is_sub_path(result, dir): - # Silence errors in site-package dirs and typeshed - follow_imports = "silent" + if is_silent_import_module(manager, result) and not root_source: + follow_imports = "silent" return (result, follow_imports) else: # Could not find a module. 
Typically the reason is a @@ -2698,11 +2751,8 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: else: parent_mod = parent_st.tree if parent_mod is not None: - if parent_mod.is_partial_stub_package: - return True - else: - # Bail out soon, complete subpackage found - return False + # Bail out soon, complete subpackage found + return parent_mod.is_partial_stub_package id = parent return False @@ -2717,7 +2767,7 @@ def module_not_found( errors = manager.errors save_import_context = errors.import_context() errors.set_import_context(caller_state.import_context) - errors.set_file(caller_state.xpath, caller_state.id) + errors.set_file(caller_state.xpath, caller_state.id, caller_state.options) if target == "builtins": errors.report( line, 0, "Cannot find 'builtins' module. Typeshed appears broken!", blocker=True @@ -2726,17 +2776,16 @@ def module_not_found( else: daemon = manager.options.fine_grained_incremental msg, notes = reason.error_message_templates(daemon) - pyver = "%d.%d" % manager.options.python_version - errors.report(line, 0, msg.format(module=target, pyver=pyver), code=codes.IMPORT) + errors.report(line, 0, msg.format(module=target), code=codes.IMPORT) top_level, second_level = get_top_two_prefixes(target) - if second_level in legacy_bundled_packages: + if second_level in legacy_bundled_packages or second_level in non_bundled_packages: top_level = second_level for note in notes: if "{stub_dist}" in note: - note = note.format(stub_dist=legacy_bundled_packages[top_level]) + note = note.format(stub_dist=stub_package_name(top_level)) errors.report(line, 0, note, severity="note", only_once=True, code=codes.IMPORT) if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: - manager.missing_stub_packages.add(legacy_bundled_packages[top_level]) + manager.missing_stub_packages.add(stub_package_name(top_level)) errors.set_import_context(save_import_context) @@ -2747,7 +2796,7 @@ def skipping_module( assert caller_state, (id, path) save_import_context = manager.errors.import_context() manager.errors.set_import_context(caller_state.import_context) - manager.errors.set_file(caller_state.xpath, caller_state.id) + manager.errors.set_file(caller_state.xpath, caller_state.id, manager.options) manager.errors.report(line, 0, f'Import of "{id}" ignored', severity="error") manager.errors.report( line, @@ -2766,7 +2815,7 @@ def skipping_ancestor(manager: BuildManager, id: str, path: str, ancestor_for: S # But beware, some package may be the ancestor of many modules, # so we'd need to cache the decision. manager.errors.set_import_context([]) - manager.errors.set_file(ancestor_for.xpath, ancestor_for.id) + manager.errors.set_file(ancestor_for.xpath, ancestor_for.id, manager.options) manager.errors.report( -1, -1, f'Ancestor package "{id}" ignored', severity="error", only_once=True ) @@ -2880,7 +2929,7 @@ def dispatch(sources: list[BuildSource], manager: BuildManager, stdout: TextIO) # then we need to collect fine grained protocol dependencies. # Since these are a global property of the program, they are calculated after we # processed the whole graph. 
- TypeState.add_all_protocol_deps(manager.fg_deps) + type_state.add_all_protocol_deps(manager.fg_deps) if not manager.options.fine_grained_incremental: rdeps = generate_deps_for_cache(manager, graph) write_deps_cache(rdeps, manager, graph) @@ -2917,13 +2966,22 @@ def dumps(self) -> str: def dump_timing_stats(path: str, graph: Graph) -> None: - """ - Dump timing stats for each file in the given graph - """ + """Dump timing stats for each file in the given graph.""" with open(path, "w") as f: - for k in sorted(graph.keys()): - v = graph[k] - f.write(f"{v.id} {v.time_spent_us}\n") + for id in sorted(graph): + f.write(f"{id} {graph[id].time_spent_us}\n") + + +def dump_line_checking_stats(path: str, graph: Graph) -> None: + """Dump per-line expression type checking stats.""" + with open(path, "w") as f: + for id in sorted(graph): + if not graph[id].per_line_checking_time_ns: + continue + f.write(f"{id}:\n") + for line in sorted(graph[id].per_line_checking_time_ns): + line_time = graph[id].per_line_checking_time_ns[line] + f.write(f"{line:>5} {line_time/1000:8.1f}\n") def dump_graph(graph: Graph, stdout: TextIO | None = None) -> None: @@ -2995,12 +3053,16 @@ def load_graph( for bs in sources: try: st = State( - id=bs.module, path=bs.path, source=bs.text, manager=manager, root_source=True + id=bs.module, + path=bs.path, + source=bs.text, + manager=manager, + root_source=not bs.followed, ) except ModuleNotFound: continue if st.id in graph: - manager.errors.set_file(st.xpath, st.id) + manager.errors.set_file(st.xpath, st.id, manager.options) manager.errors.report( -1, -1, @@ -3359,6 +3421,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No graph[id].type_check_first_pass() if not graph[id].type_checker().deferred_nodes: unfinished_modules.discard(id) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() while unfinished_modules: @@ -3367,6 +3430,7 @@ def process_stale_scc(graph: Graph, scc: list[str], manager: BuildManager) -> No continue if not graph[id].type_check_second_pass(): unfinished_modules.discard(id) + graph[id].detect_possibly_undefined_vars() graph[id].finish_passes() for id in stale: graph[id].generate_unused_ignore_notes() @@ -3542,3 +3606,13 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: set[str] else: if os.path.isfile(fnam): os.remove(fnam) + + +def is_silent_import_module(manager: BuildManager, path: str) -> bool: + if manager.options.no_silence_site_packages: + return False + # Silence errors in site-package dirs and typeshed + return any( + is_sub_path(path, dir) + for dir in manager.search_paths.package_path + manager.search_paths.typeshed_path + ) diff --git a/mypy/checker.py b/mypy/checker.py index 4991177e6c1d..4bf009f74092 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -2,13 +2,11 @@ from __future__ import annotations -import fnmatch import itertools from collections import defaultdict from contextlib import contextmanager, nullcontext from typing import ( AbstractSet, - Any, Callable, Dict, Generic, @@ -41,7 +39,7 @@ from mypy.erasetype import erase_type, erase_typevars, remove_instance_last_known_values from mypy.errorcodes import TYPE_VAR, UNUSED_AWAITABLE, UNUSED_COROUTINE, ErrorCode from mypy.errors import Errors, ErrorWatcher, report_internal_error -from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance from mypy.join import join_types from mypy.literals import Key, literal, literal_hash 
from mypy.maptype import map_instance_to_supertype @@ -64,6 +62,7 @@ ARG_STAR, CONTRAVARIANT, COVARIANT, + FUNC_NO_INFO, GDEF, IMPLICITLY_ABSTRACT, INVARIANT, @@ -71,11 +70,13 @@ LDEF, LITERAL_TYPE, MDEF, + NOT_ABSTRACT, AssertStmt, AssignmentExpr, AssignmentStmt, Block, BreakStmt, + BytesExpr, CallExpr, ClassDef, ComparisonExpr, @@ -86,6 +87,7 @@ EllipsisExpr, Expression, ExpressionStmt, + FloatExpr, ForStmt, FuncBase, FuncDef, @@ -115,6 +117,8 @@ ReturnStmt, StarExpr, Statement, + StrExpr, + SymbolNode, SymbolTable, SymbolTableNode, TempNode, @@ -158,6 +162,7 @@ erase_to_bound, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, get_type_vars, is_literal_type_like, @@ -167,33 +172,36 @@ true_only, try_expanding_sum_type_to_union, try_getting_int_literals_from_type, + try_getting_str_literals, try_getting_str_literals_from_type, tuple_fallback, ) from mypy.types import ( + ANY_STRATEGY, + MYPYC_NATIVE_INT_NAMES, OVERLOAD_NAMES, AnyType, + BoolTypeQuery, CallableType, DeletedType, + ErasedType, FunctionLike, Instance, LiteralType, NoneType, Overloaded, - ParamSpecType, PartialType, ProperType, - StarType, TupleType, Type, TypeAliasType, TypedDictType, TypeGuardedType, TypeOfAny, - TypeQuery, TypeTranslator, TypeType, TypeVarId, + TypeVarLikeType, TypeVarType, UnboundType, UninhabitedType, @@ -205,6 +213,7 @@ is_named_instance, is_optional, remove_optional, + store_argument_type, strip_type, ) from mypy.typetraverser import TypeTraverserVisitor @@ -325,8 +334,6 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface): current_node_deferred = False # Is this file a typeshed stub? is_typeshed_stub = False - # Should strict Optional-related errors be suppressed in this file? - suppress_none_errors = False # TODO: Get it from options instead options: Options # Used for collecting inferred attribute types so that they can be checked # for consistency. @@ -358,6 +365,7 @@ def __init__( tree: MypyFile, path: str, plugin: Plugin, + per_line_checking_time_ns: dict[int, int], ) -> None: """Construct a type checker. @@ -370,8 +378,6 @@ def __init__( self.path = path self.msg = MessageBuilder(errors, modules) self.plugin = plugin - self.expr_checker = mypy.checkexpr.ExpressionChecker(self, self.msg, self.plugin) - self.pattern_checker = PatternChecker(self, self.msg, self.plugin) self.tscope = Scope() self.scope = CheckerScope(tree) self.binder = ConditionalTypeBinder() @@ -387,14 +393,9 @@ def __init__( self.pass_num = 0 self.current_node_deferred = False self.is_stub = tree.is_stub - self.is_typeshed_stub = is_typeshed_file(path) + self.is_typeshed_stub = is_typeshed_file(options.abs_custom_typeshed_dir, path) self.inferred_attribute_types = None - if options.strict_optional_whitelist is None: - self.suppress_none_errors = not options.show_none_errors - else: - self.suppress_none_errors = not any( - fnmatch.fnmatch(path, pattern) for pattern in options.strict_optional_whitelist - ) + # If True, process function definitions. If False, don't. This is used # for processing module top levels in fine-grained incremental mode. self.recurse_into_functions = True @@ -414,6 +415,12 @@ def __init__( # example when type-checking class decorators. 
self.allow_abstract_call = False + # Child checker objects for specific AST node types + self.expr_checker = mypy.checkexpr.ExpressionChecker( + self, self.msg, self.plugin, per_line_checking_time_ns + ) + self.pattern_checker = PatternChecker(self, self.msg, self.plugin) + @property def type_context(self) -> list[Type | None]: return self.expr_checker.type_context @@ -451,7 +458,9 @@ def check_first_pass(self) -> None: """ self.recurse_into_functions = True with state.strict_optional_set(self.options.strict_optional): - self.errors.set_file(self.path, self.tree.fullname, scope=self.tscope) + self.errors.set_file( + self.path, self.tree.fullname, scope=self.tscope, options=self.options + ) with self.tscope.module_scope(self.tree.fullname): with self.enter_partial_types(), self.binder.top_frame_context(): for d in self.tree.defs: @@ -490,7 +499,9 @@ def check_second_pass( with state.strict_optional_set(self.options.strict_optional): if not todo and not self.deferred_nodes: return False - self.errors.set_file(self.path, self.tree.fullname, scope=self.tscope) + self.errors.set_file( + self.path, self.tree.fullname, scope=self.tscope, options=self.options + ) with self.tscope.module_scope(self.tree.fullname): self.pass_num += 1 if not todo: @@ -621,7 +632,7 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.visit_decorator(cast(Decorator, defn.items[0])) for fdef in defn.items: assert isinstance(fdef, Decorator) - self.check_func_item(fdef.func, name=fdef.func.name) + self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True) if fdef.func.abstract_status in (IS_ABSTRACT, IMPLICITLY_ABSTRACT): num_abstract += 1 if num_abstract not in (0, len(defn.items)): @@ -728,9 +739,10 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # This is to match the direction the implementation's return # needs to be compatible in. if impl_type.variables: - impl = unify_generic_callable( - impl_type, - sig1, + impl: CallableType | None = unify_generic_callable( + # Normalize both before unifying + impl_type.with_unpacked_kwargs(), + sig1.with_unpacked_kwargs(), ignore_return=False, return_constraint_direction=SUPERTYPE_OF, ) @@ -747,14 +759,14 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # Is the overload alternative's arguments subtypes of the implementation's? if not is_callable_compatible( - impl, sig1, is_compat=is_subtype_no_promote, ignore_return=True + impl, sig1, is_compat=is_subtype, ignore_return=True ): self.msg.overloaded_signatures_arg_specific(i + 1, defn.impl) # Is the overload alternative's return type a subtype of the implementation's? 
if not ( - is_subtype_no_promote(sig1.ret_type, impl.ret_type) - or is_subtype_no_promote(impl.ret_type, sig1.ret_type) + is_subtype(sig1.ret_type, impl.ret_type) + or is_subtype(impl.ret_type, sig1.ret_type) ): self.msg.overloaded_signatures_ret_specific(i + 1, defn.impl) @@ -839,6 +851,10 @@ def get_generator_yield_type(self, return_type: Type, is_coroutine: bool) -> Typ if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_yield_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -869,6 +885,10 @@ def get_generator_receive_type(self, return_type: Type, is_coroutine: bool) -> T if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_receive_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type( return_type, is_coroutine ) and not self.is_async_generator_return_type(return_type): @@ -908,6 +928,10 @@ def get_generator_return_type(self, return_type: Type, is_coroutine: bool) -> Ty if isinstance(return_type, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=return_type) + elif isinstance(return_type, UnionType): + return make_simplified_union( + [self.get_generator_return_type(item, is_coroutine) for item in return_type.items] + ) elif not self.is_generator_return_type(return_type, is_coroutine): # If the function doesn't have a proper Generator (or # Awaitable) return type, anything is permissible. @@ -950,16 +974,16 @@ def _visit_func_def(self, defn: FuncDef) -> None: new_type = self.function_type(defn) if isinstance(defn.original_def, FuncDef): # Function definition overrides function definition. - if not is_same_type(new_type, self.function_type(defn.original_def)): - self.msg.incompatible_conditional_function_def(defn) + old_type = self.function_type(defn.original_def) + if not is_same_type(new_type, old_type): + self.msg.incompatible_conditional_function_def(defn, old_type, new_type) else: # Function definition overrides a variable initialized via assignment or a # decorated function. orig_type = defn.original_def.type if orig_type is None: - # XXX This can be None, as happens in - # test_testcheck_TypeCheckSuite.testRedefinedFunctionInTryWithElse - self.msg.note("Internal mypy error checking function redefinition", defn) + # If other branch is unreachable, we don't type check it and so we might + # not have a type for the original definition return if isinstance(orig_type, PartialType): if orig_type.type is None: @@ -977,7 +1001,9 @@ def _visit_func_def(self, defn: FuncDef) -> None: # Trying to redefine something like partial empty list as function. self.fail(message_registry.INCOMPATIBLE_REDEFINITION, defn) else: - # TODO: Update conditional type binder. 
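# With the UnionType branches added to get_generator_yield_type/receive/return above, a
# generator annotated with a union of generator types has its yield/send/return types
# computed per union item and joined. A rough sketch of code this is meant to accept:
from typing import Generator, Union


def gen(flag: bool) -> Union[Generator[int, None, None], Generator[str, None, None]]:
    if flag:
        yield 1
    else:
        yield "one"  # both yields fit the combined yield type int | str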
+ name_expr = NameExpr(defn.name) + name_expr.node = defn.original_def + self.binder.assign_type(name_expr, new_type, orig_type) self.check_subtype( new_type, orig_type, @@ -988,7 +1014,11 @@ def _visit_func_def(self, defn: FuncDef) -> None: ) def check_func_item( - self, defn: FuncItem, type_override: CallableType | None = None, name: str | None = None + self, + defn: FuncItem, + type_override: CallableType | None = None, + name: str | None = None, + allow_empty: bool = False, ) -> None: """Type check a function. @@ -1002,7 +1032,7 @@ def check_func_item( typ = type_override.copy_modified(line=typ.line, column=typ.column) if isinstance(typ, CallableType): with self.enter_attribute_inference_context(): - self.check_func_def(defn, typ, name) + self.check_func_def(defn, typ, name, allow_empty) else: raise RuntimeError("Not supported") @@ -1019,7 +1049,9 @@ def enter_attribute_inference_context(self) -> Iterator[None]: yield None self.inferred_attribute_types = old_types - def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> None: + def check_func_def( + self, defn: FuncItem, typ: CallableType, name: str | None, allow_empty: bool = False + ) -> None: """Type check a function definition.""" # Expand type variables with value restrictions to ordinary types. expanded = self.expand_typevars(defn, typ) @@ -1034,11 +1066,13 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> # precise type. if isinstance(item, FuncDef): fdef = item - # Check if __init__ has an invalid, non-None return type. + # Check if __init__ has an invalid return type. if ( fdef.info and fdef.name in ("__init__", "__init_subclass__") - and not isinstance(get_proper_type(typ.ret_type), NoneType) + and not isinstance( + get_proper_type(typ.ret_type), (NoneType, UninhabitedType) + ) and not self.dynamic_funcs[-1] ): self.fail( @@ -1160,16 +1194,8 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> if ctx.line < 0: ctx = typ self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) - if typ.arg_kinds[i] == nodes.ARG_STAR: - if not isinstance(arg_type, ParamSpecType): - # builtins.tuple[T] is typing.Tuple[T, ...] - arg_type = self.named_generic_type("builtins.tuple", [arg_type]) - elif typ.arg_kinds[i] == nodes.ARG_STAR2: - if not isinstance(arg_type, ParamSpecType): - arg_type = self.named_generic_type( - "builtins.dict", [self.str_type(), arg_type] - ) - item.arguments[i].variable.type = arg_type + # Need to store arguments again for the expanded item. + store_argument_type(item, i, typ, self.named_generic_type) # Type check initialization expressions. body_is_trivial = is_trivial_body(defn.body) @@ -1189,7 +1215,7 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> self.accept(item.body) unreachable = self.binder.is_unreachable() - if not unreachable and not body_is_trivial: + if not unreachable: if defn.is_generator or is_named_instance( self.return_types[-1], "typing.AwaitableGenerator" ): @@ -1202,27 +1228,79 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> return_type = self.return_types[-1] return_type = get_proper_type(return_type) + allow_empty = allow_empty or self.options.allow_empty_bodies + + show_error = ( + not body_is_trivial + or + # Allow empty bodies for abstract methods, overloads, in tests and stubs. 
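# The __init__/__init_subclass__ return-type check above now also accepts an uninhabited
# return type (NoReturn), not only None. A sketch of a definition that is no longer
# reported as having an invalid __init__ return type:
from typing import NoReturn


class Unconstructable:
    def __init__(self) -> NoReturn:
        raise TypeError("use the module-level factory functions instead")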
+ ( + not allow_empty + and not ( + isinstance(defn, FuncDef) and defn.abstract_status != NOT_ABSTRACT + ) + and not self.is_stub + ) + ) + + # Ignore plugin generated methods, these usually don't need any bodies. + if defn.info is not FUNC_NO_INFO and ( + defn.name not in defn.info.names or defn.info.names[defn.name].plugin_generated + ): + show_error = False + + # Ignore also definitions that appear in `if TYPE_CHECKING: ...` blocks. + # These can't be called at runtime anyway (similar to plugin-generated). + if isinstance(defn, FuncDef) and defn.is_mypy_only: + show_error = False + + # We want to minimize the fallout from checking empty bodies + # that was absent in many mypy versions. + if body_is_trivial and is_subtype(NoneType(), return_type): + show_error = False + + may_be_abstract = ( + body_is_trivial + and defn.info is not FUNC_NO_INFO + and defn.info.metaclass_type is not None + and defn.info.metaclass_type.type.has_base("abc.ABCMeta") + ) + if self.options.warn_no_return: - if not isinstance(return_type, (NoneType, AnyType)): + if ( + not self.current_node_deferred + and not isinstance(return_type, (NoneType, AnyType)) + and show_error + ): # Control flow fell off the end of a function that was - # declared to return a non-None type and is not - # entirely pass/Ellipsis/raise NotImplementedError. + # declared to return a non-None type. if isinstance(return_type, UninhabitedType): # This is a NoReturn function - self.fail(message_registry.INVALID_IMPLICIT_RETURN, defn) + msg = message_registry.INVALID_IMPLICIT_RETURN else: - self.fail(message_registry.MISSING_RETURN_STATEMENT, defn) - else: + msg = message_registry.MISSING_RETURN_STATEMENT + if body_is_trivial: + msg = msg._replace(code=codes.EMPTY_BODY) + self.fail(msg, defn) + if may_be_abstract: + self.note(message_registry.EMPTY_BODY_ABSTRACT, defn) + elif show_error: + msg = message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE + if body_is_trivial: + msg = msg._replace(code=codes.EMPTY_BODY) # similar to code in check_return_stmt - self.check_subtype( - subtype_label="implicitly returns", - subtype=NoneType(), - supertype_label="expected", - supertype=return_type, - context=defn, - msg=message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE, - code=codes.RETURN_VALUE, - ) + if ( + not self.check_subtype( + subtype_label="implicitly returns", + subtype=NoneType(), + supertype_label="expected", + supertype=return_type, + context=defn, + msg=msg, + ) + and may_be_abstract + ): + self.note(message_registry.EMPTY_BODY_ABSTRACT, defn) self.return_types.pop() @@ -1230,13 +1308,23 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: str | None) -> def check_unbound_return_typevar(self, typ: CallableType) -> None: """Fails when the return typevar is not defined in arguments.""" - if typ.ret_type in typ.variables: - arg_type_visitor = CollectArgTypes() + if isinstance(typ.ret_type, TypeVarType) and typ.ret_type in typ.variables: + arg_type_visitor = CollectArgTypeVarTypes() for argtype in typ.arg_types: argtype.accept(arg_type_visitor) if typ.ret_type not in arg_type_visitor.arg_types: self.fail(message_registry.UNBOUND_TYPEVAR, typ.ret_type, code=TYPE_VAR) + upper_bound = get_proper_type(typ.ret_type.upper_bound) + if not ( + isinstance(upper_bound, Instance) + and upper_bound.type.fullname == "builtins.object" + ): + self.note( + "Consider using the upper bound " + f"{format_type(typ.ret_type.upper_bound)} instead", + context=typ.ret_type, + ) def check_default_args(self, item: FuncItem, body_is_trivial: bool) -> None: for arg 
in item.arguments: @@ -1250,14 +1338,27 @@ def check_default_args(self, item: FuncItem, body_is_trivial: bool) -> None: msg += f"tuple argument {name[12:]}" else: msg += f'argument "{name}"' + if ( + not self.options.implicit_optional + and isinstance(arg.initializer, NameExpr) + and arg.initializer.fullname == "builtins.None" + ): + notes = [ + "PEP 484 prohibits implicit Optional. " + "Accordingly, mypy has changed its default to no_implicit_optional=True", + "Use https://github.com/hauntsaninja/no_implicit_optional to automatically " + "upgrade your codebase", + ] + else: + notes = None self.check_simple_assignment( arg.variable.type, arg.initializer, context=arg.initializer, - msg=msg, + msg=ErrorMessage(msg, code=codes.ASSIGNMENT), lvalue_name="argument", rvalue_name="default", - code=codes.ASSIGNMENT, + notes=notes, ) def is_forward_op_method(self, method_name: str) -> bool: @@ -1314,7 +1415,7 @@ def check___new___signature(self, fdef: FuncDef, typ: CallableType) -> None: bound_type = bind_self(typ, self_type, is_classmethod=True) # Check that __new__ (after binding cls) returns an instance # type (or any). - if isinstance(fdef.info, TypeInfo) and fdef.info.is_metaclass(): + if fdef.info.is_metaclass(): # This is a metaclass, so it must return a new unrelated type. self.check_subtype( bound_type.ret_type, @@ -1324,7 +1425,9 @@ def check___new___signature(self, fdef: FuncDef, typ: CallableType) -> None: "returns", "but must return a subtype of", ) - elif not isinstance(get_proper_type(bound_type.ret_type), (AnyType, Instance, TupleType)): + elif not isinstance( + get_proper_type(bound_type.ret_type), (AnyType, Instance, TupleType, UninhabitedType) + ): self.fail( message_registry.NON_INSTANCE_NEW_TYPE.format(format_type(bound_type.ret_type)), fdef, @@ -1617,6 +1720,8 @@ def check_slots_definition(self, typ: Type, context: Context) -> None: def check_match_args(self, var: Var, typ: Type, context: Context) -> None: """Check that __match_args__ contains literal strings""" + if not self.scope.active_class(): + return typ = get_proper_type(typ) if not isinstance(typ, TupleType) or not all( [is_string_literal(item) for item in typ.items] @@ -1719,6 +1824,7 @@ def check_method_override_for_base_with_name( context = defn.func # Construct the type of the overriding method. + # TODO: this logic is much less complete than similar one in checkmember.py if isinstance(defn, (FuncDef, OverloadedFuncDef)): typ: Type = self.function_type(defn) override_class_or_static = defn.is_class or defn.is_static @@ -1768,15 +1874,54 @@ def check_method_override_for_base_with_name( original_class_or_static = fdef.is_class or fdef.is_static else: original_class_or_static = False # a variable can't be class or static + + if isinstance(original_type, FunctionLike): + active_self_type = self.scope.active_self_type() + if isinstance(original_type, Overloaded) and active_self_type: + # If we have an overload, filter to overloads that match the self type. + # This avoids false positives for concrete subclasses of generic classes, + # see testSelfTypeOverrideCompatibility for an example. + # It's possible we might want to do this as part of bind_and_map_method + filtered_items = [ + item + for item in original_type.items + if not item.arg_types or is_subtype(active_self_type, item.arg_types[0]) + ] + # If we don't have any filtered_items, maybe it's always a valid override + # of the superclass? 
However if you get to that point you're in murky type + # territory anyway, so we just preserve the type and have the behaviour match + # that of older versions of mypy. + if filtered_items: + original_type = Overloaded(filtered_items) + original_type = self.bind_and_map_method(base_attr, original_type, defn.info, base) + if original_node and is_property(original_node): + original_type = get_property_type(original_type) + + if isinstance(typ, FunctionLike) and is_property(defn): + typ = get_property_type(typ) + if ( + isinstance(original_node, Var) + and not original_node.is_final + and (not original_node.is_property or original_node.is_settable_property) + and isinstance(defn, Decorator) + ): + # We only give an error where no other similar errors will be given. + if not isinstance(original_type, AnyType): + self.msg.fail( + "Cannot override writeable attribute with read-only property", + # Give an error on function line to match old behaviour. + defn.func, + code=codes.OVERRIDE, + ) + if isinstance(original_type, AnyType) or isinstance(typ, AnyType): pass elif isinstance(original_type, FunctionLike) and isinstance(typ, FunctionLike): - original = self.bind_and_map_method(base_attr, original_type, defn.info, base) # Check that the types are compatible. # TODO overloaded signatures self.check_override( typ, - original, + original_type, defn.name, name, base.name, @@ -1791,8 +1936,8 @@ def check_method_override_for_base_with_name( # pass elif ( - base_attr.node - and not self.is_writable_attribute(base_attr.node) + original_node + and not self.is_writable_attribute(original_node) and is_subtype(typ, original_type) ): # If the attribute is read-only, allow covariance @@ -1875,7 +2020,7 @@ def check_override( ): fail = True op_method_wider_note = True - if isinstance(original, FunctionLike) and isinstance(override, FunctionLike): + if isinstance(override, FunctionLike): if original_class_or_static and not override_class_or_static: fail = True elif isinstance(original, CallableType) and isinstance(override, CallableType): @@ -1887,6 +2032,13 @@ def check_override( if fail: emitted_msg = False + + # Normalize signatures, so we get better diagnostics. 
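# The Decorator/Var check added above reports overriding a writeable attribute with a
# read-only property (error text as in the hunk). Minimal sketch:
class Base:
    foo: int = 0


class Derived(Base):
    @property
    def foo(self) -> int:  # error: Cannot override writeable attribute with read-only property
        return 1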
+ if isinstance(override, (CallableType, Overloaded)): + override = override.with_unpacked_kwargs() + if isinstance(original, (CallableType, Overloaded)): + original = original.with_unpacked_kwargs() + if ( isinstance(override, CallableType) and isinstance(original, CallableType) @@ -1915,7 +2067,13 @@ def erase_override(t: Type) -> Type: if not is_subtype( original.arg_types[i], erase_override(override.arg_types[i]) ): + arg_type_in_super = original.arg_types[i] + + if isinstance(node, FuncDef): + context: Context = node.arguments[i + len(override.bound_args)] + else: + context = node self.msg.argument_incompatible_with_supertype( i + 1, name, @@ -1923,7 +2081,8 @@ def erase_override(t: Type) -> Type: name_in_super, arg_type_in_super, supertype, - node, + context, + secondary_context=node, ) emitted_msg = True @@ -2009,6 +2168,7 @@ def visit_class_def(self, defn: ClassDef) -> None: if not defn.has_incompatible_baseclass: # Otherwise we've already found errors; more errors are not useful self.check_multiple_inheritance(typ) + self.check_metaclass_compatibility(typ) self.check_final_deletable(typ) if defn.decorators: @@ -2025,7 +2185,7 @@ def visit_class_def(self, defn: ClassDef) -> None: temp = self.temp_node(sig, context=decorator) fullname = None if isinstance(decorator, RefExpr): - fullname = decorator.fullname + fullname = decorator.fullname or None # TODO: Figure out how to have clearer error messages. # (e.g. "class decorator must be a function that accepts a type." @@ -2037,6 +2197,24 @@ def visit_class_def(self, defn: ClassDef) -> None: self.allow_abstract_call = old_allow_abstract_call # TODO: Apply the sig to the actual TypeInfo so we can handle decorators # that completely swap out the type. (e.g. Callable[[Type[A]], Type[B]]) + if typ.defn.type_vars: + for base_inst in typ.bases: + for base_tvar, base_decl_tvar in zip( + base_inst.args, base_inst.type.defn.type_vars + ): + if ( + isinstance(base_tvar, TypeVarType) + and base_tvar.variance != INVARIANT + and isinstance(base_decl_tvar, TypeVarType) + and base_decl_tvar.variance != base_tvar.variance + ): + self.fail( + f'Variance of TypeVar "{base_tvar.name}" incompatible ' + "with variance in parent type", + context=defn, + code=codes.TYPE_VAR, + ) + if typ.is_protocol and typ.defn.type_vars: self.check_protocol_variance(defn) if not defn.has_incompatible_baseclass and defn.info.is_enum: @@ -2145,9 +2323,7 @@ def is_final_enum_value(self, sym: SymbolTableNode) -> bool: ): return False - if self.is_stub or sym.node.has_explicit_value: - return True - return False + return self.is_stub or sym.node.has_explicit_value def check_enum_bases(self, defn: ClassDef) -> None: """ @@ -2254,18 +2430,27 @@ def check_multiple_inheritance(self, typ: TypeInfo) -> None: if name in base2.names and base2 not in base.mro: self.check_compatibility(name, base, base2, typ) - def determine_type_of_class_member(self, sym: SymbolTableNode) -> Type | None: + def determine_type_of_member(self, sym: SymbolTableNode) -> Type | None: if sym.type is not None: return sym.type if isinstance(sym.node, FuncBase): return self.function_type(sym.node) if isinstance(sym.node, TypeInfo): - # nested class - return type_object_type(sym.node, self.named_type) + if sym.node.typeddict_type: + # We special-case TypedDict, because they don't define any constructor. 
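# The new loop over typ.bases above rejects parameterizing a base class with a TypeVar
# whose variance differs from the variance declared on the corresponding parent TypeVar.
# Roughly:
from typing import Generic, TypeVar

T = TypeVar("T")  # invariant
T_co = TypeVar("T_co", covariant=True)


class Invariant(Generic[T]): ...


class Bad(Invariant[T_co], Generic[T_co]):  # error: Variance of TypeVar "T_co" incompatible
    ...                                     # with variance in parent type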
+ return self.expr_checker.typeddict_callable(sym.node) + else: + return type_object_type(sym.node, self.named_type) if isinstance(sym.node, TypeVarExpr): # Use of TypeVars is rejected in an expression/runtime context, so # we don't need to check supertype compatibility for them. return AnyType(TypeOfAny.special_form) + if isinstance(sym.node, TypeAlias): + with self.msg.filter_errors(): + # Suppress any errors, they will be given when analyzing the corresponding node. + # Here we may have incorrect options and location context. + return self.expr_checker.alias_type_in_runtime_context(sym.node, ctx=sym.node) + # TODO: handle more node kinds here. return None def check_compatibility( @@ -2296,8 +2481,8 @@ class C(B, A[int]): ... # this is unsafe because... return first = base1.names[name] second = base2.names[name] - first_type = get_proper_type(self.determine_type_of_class_member(first)) - second_type = get_proper_type(self.determine_type_of_class_member(second)) + first_type = get_proper_type(self.determine_type_of_member(first)) + second_type = get_proper_type(self.determine_type_of_member(second)) if isinstance(first_type, FunctionLike) and isinstance(second_type, FunctionLike): if first_type.is_type_obj() and second_type.is_type_obj(): @@ -2313,11 +2498,20 @@ class C(B, A[int]): ... # this is unsafe because... second_sig = self.bind_and_map_method(second, second_type, ctx, base2) ok = is_subtype(first_sig, second_sig, ignore_pos_arg_names=True) elif first_type and second_type: + if isinstance(first.node, Var): + first_type = expand_self_type(first.node, first_type, fill_typevars(ctx)) + if isinstance(second.node, Var): + second_type = expand_self_type(second.node, second_type, fill_typevars(ctx)) ok = is_equivalent(first_type, second_type) if not ok: second_node = base2[name].node - if isinstance(second_node, Decorator) and second_node.func.is_property: - ok = is_subtype(first_type, cast(CallableType, second_type).ret_type) + if ( + isinstance(second_type, FunctionLike) + and second_node is not None + and is_property(second_node) + ): + second_type = get_property_type(second_type) + ok = is_subtype(first_type, second_type) else: if first_type is None: self.msg.cannot_determine_type_in_base(name, base1.name, ctx) @@ -2337,14 +2531,43 @@ class C(B, A[int]): ... # this is unsafe because... 
if not ok: self.msg.base_class_definitions_incompatible(name, base1, base2, ctx) + def check_metaclass_compatibility(self, typ: TypeInfo) -> None: + """Ensures that metaclasses of all parent types are compatible.""" + if ( + typ.is_metaclass() + or typ.is_protocol + or typ.is_named_tuple + or typ.is_enum + or typ.typeddict_type is not None + ): + return # Reasonable exceptions from this check + + metaclasses = [ + entry.metaclass_type + for entry in typ.mro[1:-1] + if entry.metaclass_type + and not is_named_instance(entry.metaclass_type, "builtins.type") + ] + if not metaclasses: + return + if typ.metaclass_type is not None and all( + is_subtype(typ.metaclass_type, meta) for meta in metaclasses + ): + return + self.fail( + "Metaclass conflict: the metaclass of a derived class must be " + "a (non-strict) subclass of the metaclasses of all its bases", + typ, + ) + def visit_import_from(self, node: ImportFrom) -> None: self.check_import(node) def visit_import_all(self, node: ImportAll) -> None: self.check_import(node) - def visit_import(self, s: Import) -> None: - pass + def visit_import(self, node: Import) -> None: + self.check_import(node) def check_import(self, node: ImportBase) -> None: for assign in node.assignments: @@ -2353,8 +2576,8 @@ def check_import(self, node: ImportBase) -> None: if lvalue_type is None: # TODO: This is broken. lvalue_type = AnyType(TypeOfAny.special_form) - message = '{} "{}"'.format( - message_registry.INCOMPATIBLE_IMPORT_OF, cast(NameExpr, assign.rvalue).name + message = message_registry.INCOMPATIBLE_IMPORT_OF.format( + cast(NameExpr, assign.rvalue).name ) self.check_simple_assignment( lvalue_type, @@ -2387,6 +2610,7 @@ def should_report_unreachable_issues(self) -> bool: return ( self.in_checked_function() and self.options.warn_unreachable + and not self.current_node_deferred and not self.binder.is_unreachable_warning_suppressed() ) @@ -2467,27 +2691,11 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: ): self.fail(message_registry.DEPENDENT_FINAL_IN_CLASS_BODY, s) - def check_type_alias_rvalue(self, s: AssignmentStmt) -> None: - if not (self.is_stub and isinstance(s.rvalue, OpExpr) and s.rvalue.op == "|"): - # We do this mostly for compatibility with old semantic analyzer. - # TODO: should we get rid of this? - alias_type = self.expr_checker.accept(s.rvalue) - else: - # Avoid type checking 'X | Y' in stubs, since there can be errors - # on older Python targets. - alias_type = AnyType(TypeOfAny.special_form) - - def accept_items(e: Expression) -> None: - if isinstance(e, OpExpr) and e.op == "|": - accept_items(e.left) - accept_items(e.right) - else: - # Nested union types have been converted to type context - # in semantic analysis (such as in 'list[int | str]'), - # so we don't need to deal with them here. - self.expr_checker.accept(e) + if s.unanalyzed_type and not self.in_checked_function(): + self.msg.annotation_in_unchecked_function(context=s) - accept_items(s.rvalue) + def check_type_alias_rvalue(self, s: AssignmentStmt) -> None: + alias_type = self.expr_checker.accept(s.rvalue) self.store_type(s.lvalues[-1], alias_type) def check_assignment( @@ -2550,8 +2758,8 @@ def check_assignment( # None initializers preserve the partial None type. 
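# check_metaclass_compatibility above mirrors the runtime rule that a derived class's
# metaclass must be a (non-strict) subclass of the metaclasses of all its bases
# (the class statement below also raises TypeError at runtime). Sketch:
class MetaA(type): ...


class MetaB(type): ...


class A(metaclass=MetaA): ...


class B(metaclass=MetaB): ...


class C(A, B):  # error: Metaclass conflict: the metaclass of a derived class must be
    ...         # a (non-strict) subclass of the metaclasses of all its bases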
return - if is_valid_inferred_type(rvalue_type): - var = lvalue_type.var + var = lvalue_type.var + if is_valid_inferred_type(rvalue_type, is_lvalue_final=var.is_final): partial_types = self.find_partial_types(var) if partial_types is not None: if not self.current_node_deferred: @@ -2617,9 +2825,7 @@ def check_assignment( lvalue_type = make_optional_type(lvalue_type) self.set_inferred_type(lvalue.node, lvalue, lvalue_type) - rvalue_type = self.check_simple_assignment( - lvalue_type, rvalue, context=rvalue, code=codes.ASSIGNMENT - ) + rvalue_type = self.check_simple_assignment(lvalue_type, rvalue, context=rvalue) # Special case: only non-abstract non-protocol classes can be assigned to # variables with explicit type Type[A], where A is protocol or abstract. @@ -2649,7 +2855,8 @@ def check_assignment( self.check_indexed_assignment(index_lvalue, rvalue, lvalue) if inferred: - rvalue_type = self.expr_checker.accept(rvalue) + type_context = self.get_variable_type_context(inferred) + rvalue_type = self.expr_checker.accept(rvalue, type_context=type_context) if not ( inferred.is_final or (isinstance(lvalue, NameExpr) and lvalue.name == "__match_args__") @@ -2661,6 +2868,29 @@ def check_assignment( # (type, operator) tuples for augmented assignments supported with partial types partial_type_augmented_ops: Final = {("builtins.list", "+"), ("builtins.set", "|")} + def get_variable_type_context(self, inferred: Var) -> Type | None: + type_contexts = [] + if inferred.info: + for base in inferred.info.mro[1:]: + base_type, base_node = self.lvalue_type_from_base(inferred, base) + if ( + base_type + and not (isinstance(base_node, Var) and base_node.invalid_partial_type) + and not isinstance(base_type, PartialType) + ): + type_contexts.append(base_type) + # Use most derived supertype as type context if available. + if not type_contexts: + return None + candidate = type_contexts[0] + for other in type_contexts: + if is_proper_subtype(other, candidate): + candidate = other + elif not is_subtype(candidate, other): + # Multiple incompatible candidates, cannot use any of them as context. + return None + return candidate + def try_infer_partial_generic_type_from_assignment( self, lvalue: Lvalue, rvalue: Expression, op: str ) -> None: @@ -2736,12 +2966,8 @@ def check_compatibility_all_supers( # The type of "__slots__" and some other attributes usually doesn't need to # be compatible with a base class. We'll still check the type of "__slots__" # against "object" as an exception. 
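# get_variable_type_context above uses the declared type from a base class as the type
# context when inferring a subclass redefinition. A rough sketch of the intended effect,
# assuming the base annotation is picked up as context:
from __future__ import annotations

from typing import Optional


class Base:
    x: Optional[list[int]] = None


class Derived(Base):
    x = []  # inferred with the base's Optional[list[int]] as context, i.e. as list[int],
            # rather than being left as a bare partial list type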
- if ( - isinstance(lvalue_node, Var) - and lvalue_node.allow_incompatible_override - and not ( - lvalue_node.name == "__slots__" and base.fullname == "builtins.object" - ) + if lvalue_node.allow_incompatible_override and not ( + lvalue_node.name == "__slots__" and base.fullname == "builtins.object" ): continue @@ -2749,6 +2975,8 @@ def check_compatibility_all_supers( continue base_type, base_node = self.lvalue_type_from_base(lvalue_node, base) + if isinstance(base_type, PartialType): + base_type = None if base_type: assert base_node is not None @@ -2826,7 +3054,6 @@ def check_compatibility_super( message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, "expression has type", f'base class "{base.name}" defined the type as', - code=codes.ASSIGNMENT, ) return True @@ -2841,6 +3068,8 @@ def lvalue_type_from_base( if base_var: base_node = base_var.node base_type = base_var.type + if isinstance(base_node, Var) and base_type is not None: + base_type = expand_self_type(base_node, base_type, fill_typevars(expr_node.info)) if isinstance(base_node, Decorator): base_node = base_node.func base_type = base_node.type @@ -3065,7 +3294,7 @@ def check_assignment_to_multiple_lvalues( last_idx: int | None = None for idx_rval, rval in enumerate(rvalue.items): if isinstance(rval, StarExpr): - typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) + typs = get_proper_type(self.expr_checker.accept(rval.expr)) if isinstance(typs, TupleType): rvalues.extend([TempNode(typ) for typ in typs.items]) elif self.type_is_iterable(typs) and isinstance(typs, Instance): @@ -3088,7 +3317,7 @@ def check_assignment_to_multiple_lvalues( iterable_end: int | None = None for i, rval in enumerate(rvalues): if isinstance(rval, StarExpr): - typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) + typs = get_proper_type(self.expr_checker.accept(rval.expr)) if self.type_is_iterable(typs) and isinstance(typs, Instance): if iterable_start is None: iterable_start = i @@ -3161,6 +3390,9 @@ def check_multi_assignment( # TODO: maybe elsewhere; redundant. rvalue_type = get_proper_type(rv_type or self.expr_checker.accept(rvalue)) + if isinstance(rvalue_type, TypeVarLikeType): + rvalue_type = get_proper_type(rvalue_type.upper_bound) + if isinstance(rvalue_type, UnionType): # If this is an Optional type in non-strict Optional code, unwrap it. relevant_items = rvalue_type.relevant_items() @@ -3242,8 +3474,7 @@ def check_multi_assignment_from_union( assert declared_type is not None clean_items.append((type, declared_type)) - # TODO: fix signature of zip() in typeshed. 
- types, declared_types = cast(Any, zip)(*clean_items) + types, declared_types = zip(*clean_items) self.binder.assign_type( expr, make_simplified_union(list(types)), @@ -3449,8 +3680,7 @@ def check_lvalue(self, lvalue: Lvalue) -> tuple[Type | None, IndexExpr | None, V ] lvalue_type = TupleType(types, self.named_type("builtins.tuple")) elif isinstance(lvalue, StarExpr): - typ, _, _ = self.check_lvalue(lvalue.expr) - lvalue_type = StarType(typ) if typ else None + lvalue_type, _, _ = self.check_lvalue(lvalue.expr) else: lvalue_type = self.expr_checker.accept(lvalue) @@ -3478,7 +3708,10 @@ def infer_variable_type( """Infer the type of initialized variables from initializer type.""" if isinstance(init_type, DeletedType): self.msg.deleted_as_rvalue(init_type, context) - elif not is_valid_inferred_type(init_type) and not self.no_partial_types: + elif ( + not is_valid_inferred_type(init_type, is_lvalue_final=name.is_final) + and not self.no_partial_types + ): # We cannot use the type of the initialization expression for full type # inference (it's not specific enough), but we might be able to give # partial type which will be made more specific later. A partial type @@ -3606,16 +3839,33 @@ def inference_error_fallback_type(self, type: Type) -> Type: # we therefore need to erase them. return erase_typevars(fallback) + def simple_rvalue(self, rvalue: Expression) -> bool: + """Returns True for expressions for which inferred type should not depend on context. + + Note that this function can still return False for some expressions where inferred type + does not depend on context. It only exists for performance optimizations. + """ + if isinstance(rvalue, (IntExpr, StrExpr, BytesExpr, FloatExpr, RefExpr)): + return True + if isinstance(rvalue, CallExpr): + if isinstance(rvalue.callee, RefExpr) and isinstance(rvalue.callee.node, FuncBase): + typ = rvalue.callee.node.type + if isinstance(typ, CallableType): + return not typ.variables + elif isinstance(typ, Overloaded): + return not any(item.variables for item in typ.items) + return False + def check_simple_assignment( self, lvalue_type: Type | None, rvalue: Expression, context: Context, - msg: str = message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, + msg: ErrorMessage = message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, lvalue_name: str = "variable", rvalue_name: str = "expression", *, - code: ErrorCode | None = None, + notes: list[str] | None = None, ) -> Type: if self.is_stub and isinstance(rvalue, EllipsisExpr): # '...' is always a valid initializer in a stub. @@ -3627,6 +3877,30 @@ def check_simple_assignment( rvalue_type = self.expr_checker.accept( rvalue, lvalue_type, always_allow_any=always_allow_any ) + if ( + isinstance(get_proper_type(lvalue_type), UnionType) + # Skip literal types, as they have special logic (for better errors). + and not isinstance(get_proper_type(rvalue_type), LiteralType) + and not self.simple_rvalue(rvalue) + ): + # Try re-inferring r.h.s. in empty context, and use that if it + # results in a narrower type. We don't do this always because this + # may cause some perf impact, plus we want to partially preserve + # the old behavior. This helps with various practical examples, see + # e.g. testOptionalTypeNarrowedByGenericCall. + with self.msg.filter_errors() as local_errors, self.local_type_map() as type_map: + alt_rvalue_type = self.expr_checker.accept( + rvalue, None, always_allow_any=always_allow_any + ) + if ( + not local_errors.has_new_errors() + # Skip Any type, since it is special cased in binder. 
+ and not isinstance(get_proper_type(alt_rvalue_type), AnyType) + and is_valid_inferred_type(alt_rvalue_type) + and is_proper_subtype(alt_rvalue_type, rvalue_type) + ): + rvalue_type = alt_rvalue_type + self.store_types(type_map) if isinstance(rvalue_type, DeletedType): self.msg.deleted_as_rvalue(rvalue_type, context) if isinstance(lvalue_type, DeletedType): @@ -3640,7 +3914,7 @@ def check_simple_assignment( msg, f"{rvalue_name} has type", f"{lvalue_name} has type", - code=code, + notes=notes, ) return rvalue_type @@ -3664,16 +3938,12 @@ def check_member_assignment( if (isinstance(instance_type, FunctionLike) and instance_type.is_type_obj()) or isinstance( instance_type, TypeType ): - rvalue_type = self.check_simple_assignment( - attribute_type, rvalue, context, code=codes.ASSIGNMENT - ) + rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context) return rvalue_type, attribute_type, True if not isinstance(attribute_type, Instance): # TODO: support __set__() for union types. - rvalue_type = self.check_simple_assignment( - attribute_type, rvalue, context, code=codes.ASSIGNMENT - ) + rvalue_type = self.check_simple_assignment(attribute_type, rvalue, context) return rvalue_type, attribute_type, True mx = MemberContext( @@ -3692,9 +3962,7 @@ def check_member_assignment( # the return type of __get__. This doesn't match the python semantics, # (which allow you to override the descriptor with any value), but preserves # the type of accessing the attribute (even after the override). - rvalue_type = self.check_simple_assignment( - get_type, rvalue, context, code=codes.ASSIGNMENT - ) + rvalue_type = self.check_simple_assignment(get_type, rvalue, context) return rvalue_type, get_type, True dunder_set = attribute_type.type.get_method("__set__") @@ -3761,9 +4029,7 @@ def check_member_assignment( # and '__get__' type is narrower than '__set__', then we invoke the binder to narrow type # by this assignment. Technically, this is not safe, but in practice this is # what a user expects. - rvalue_type = self.check_simple_assignment( - set_type, rvalue, context, code=codes.ASSIGNMENT - ) + rvalue_type = self.check_simple_assignment(set_type, rvalue, context) infer = is_subtype(rvalue_type, get_type) and is_subtype(get_type, set_type) return rvalue_type if infer else set_type, get_type, infer @@ -3937,7 +4203,6 @@ def check_return_stmt(self, s: ReturnStmt) -> None: context=s.expr, outer_context=s, msg=message_registry.INCOMPATIBLE_RETURN_VALUE_TYPE, - code=codes.RETURN_VALUE, ) else: # Empty returns are valid in Generators with Any typed returns, but not in @@ -4105,20 +4370,13 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: with self.binder.frame_context(can_skip=True, fall_through=4): typ = s.types[i] if typ: - t = self.check_except_handler_test(typ) + t = self.check_except_handler_test(typ, s.is_star) var = s.vars[i] if var: # To support local variables, we make this a definition line, # causing assignment to set the variable's type. var.is_inferred_def = True - # We also temporarily set current_node_deferred to False to - # make sure the inference happens. - # TODO: Use a better solution, e.g. a - # separate Var for each except block. 
- am_deferring = self.current_node_deferred - self.current_node_deferred = False self.check_assignment(var, self.temp_node(t, var)) - self.current_node_deferred = am_deferring self.accept(s.handlers[i]) var = s.vars[i] if var: @@ -4132,7 +4390,7 @@ def visit_try_without_finally(self, s: TryStmt, try_frame: bool) -> None: if s.else_body: self.accept(s.else_body) - def check_except_handler_test(self, n: Expression) -> Type: + def check_except_handler_test(self, n: Expression, is_star: bool) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) @@ -4148,22 +4406,47 @@ def check_except_handler_test(self, n: Expression) -> Type: item = ttype.items[0] if not item.is_type_obj(): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) - exc_type = item.ret_type + return self.default_exception_type(is_star) + exc_type = erase_typevars(item.ret_type) elif isinstance(ttype, TypeType): exc_type = ttype.item else: self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) if not is_subtype(exc_type, self.named_type("builtins.BaseException")): self.fail(message_registry.INVALID_EXCEPTION_TYPE, n) - return AnyType(TypeOfAny.from_error) + return self.default_exception_type(is_star) all_types.append(exc_type) + if is_star: + new_all_types: list[Type] = [] + for typ in all_types: + if is_proper_subtype(typ, self.named_type("builtins.BaseExceptionGroup")): + self.fail(message_registry.INVALID_EXCEPTION_GROUP, n) + new_all_types.append(AnyType(TypeOfAny.from_error)) + else: + new_all_types.append(typ) + return self.wrap_exception_group(new_all_types) return make_simplified_union(all_types) + def default_exception_type(self, is_star: bool) -> Type: + """Exception type to return in case of a previous type error.""" + any_type = AnyType(TypeOfAny.from_error) + if is_star: + return self.named_generic_type("builtins.ExceptionGroup", [any_type]) + return any_type + + def wrap_exception_group(self, types: Sequence[Type]) -> Type: + """Transform except* variable type into an appropriate exception group.""" + arg = make_simplified_union(types) + if is_subtype(arg, self.named_type("builtins.Exception")): + base = "builtins.ExceptionGroup" + else: + base = "builtins.BaseExceptionGroup" + return self.named_generic_type(base, [arg]) + def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: """Helper for check_except_handler_test to retrieve handler types.""" typ = get_proper_type(typ) @@ -4175,7 +4458,7 @@ def get_types_from_except_handler(self, typ: Type, n: Expression) -> list[Type]: for item in typ.relevant_items() for union_typ in self.get_types_from_except_handler(item, n) ] - elif isinstance(typ, Instance) and is_named_instance(typ, "builtins.tuple"): + elif is_named_instance(typ, "builtins.tuple"): # variadic tuple return [typ.args[0]] else: @@ -4209,6 +4492,10 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: iterable = get_proper_type(echk.accept(expr)) iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], expr)[0] + int_type = self.analyze_range_native_int_type(expr) + if int_type: + return iterator, int_type + if isinstance(iterable, TupleType): joined: Type = UninhabitedType() for item in iterable.items: @@ -4218,6 +4505,53 @@ def analyze_iterable_item_type(self, expr: Expression) -> tuple[Type, Type]: # Non-tuple iterable. 
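# With the is_star handling above, the variable bound by an "except*" clause is wrapped
# via wrap_exception_group: ExceptionGroup[...] when every handled type is an Exception,
# BaseExceptionGroup[...] otherwise. Sketch (Python 3.11 syntax):
def handler() -> None:
    try:
        pass
    except* ValueError as eg:
        reveal_type(eg)  # ExceptionGroup[ValueError]
    except* KeyboardInterrupt as eg:
        reveal_type(eg)  # BaseExceptionGroup[KeyboardInterrupt]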
return iterator, echk.check_method_call_by_name("__next__", iterator, [], [], expr)[0] + def analyze_iterable_item_type_without_expression( + self, type: Type, context: Context + ) -> tuple[Type, Type]: + """Analyse iterable type and return iterator and iterator item types.""" + echk = self.expr_checker + iterable = get_proper_type(type) + iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], context)[0] + + if isinstance(iterable, TupleType): + joined: Type = UninhabitedType() + for item in iterable.items: + joined = join_types(joined, item) + return iterator, joined + else: + # Non-tuple iterable. + return ( + iterator, + echk.check_method_call_by_name("__next__", iterator, [], [], context)[0], + ) + + def analyze_range_native_int_type(self, expr: Expression) -> Type | None: + """Try to infer native int item type from arguments to range(...). + + For example, return i64 if the expression is "range(0, i64(n))". + + Return None if unsuccessful. + """ + if ( + isinstance(expr, CallExpr) + and isinstance(expr.callee, RefExpr) + and expr.callee.fullname == "builtins.range" + and 1 <= len(expr.args) <= 3 + and all(kind == ARG_POS for kind in expr.arg_kinds) + ): + native_int: Type | None = None + ok = True + for arg in expr.args: + argt = get_proper_type(self.lookup_type(arg)) + if isinstance(argt, Instance) and argt.type.fullname in MYPYC_NATIVE_INT_NAMES: + if native_int is None: + native_int = argt + elif argt != native_int: + ok = False + if ok and native_int: + return native_int + return None + def analyze_container_item_type(self, typ: Type) -> Type | None: """Check if a type is a nominal container of a union of such. @@ -4287,7 +4621,7 @@ def visit_decorator(self, e: Decorator) -> None: temp = self.temp_node(sig, context=e) fullname = None if isinstance(d, RefExpr): - fullname = d.fullname + fullname = d.fullname or None # if this is a expression like @b.a where b is an object, get the type of b # so we can pass it the method hook in the plugins object_type: Type | None = None @@ -4307,7 +4641,8 @@ def visit_decorator(self, e: Decorator) -> None: if len([k for k in sig.arg_kinds if k.is_required()]) > 1: self.msg.fail("Too many arguments for property", e) self.check_incompatible_property_override(e) - if e.func.info and not e.func.is_dynamic(): + # For overloaded functions we already checked override for overload as a whole. + if e.func.info and not e.func.is_dynamic() and not e.is_overload: self.check_method_override(e) if e.func.info and e.func.name in ("__init__", "__new__"): @@ -4350,7 +4685,7 @@ def visit_with_stmt(self, s: WithStmt) -> None: # exceptions or not. We determine this using a heuristic based on the # return type of the __exit__ method -- see the discussion in # https://github.com/python/mypy/issues/7214 and the section about context managers - # in https://github.com/python/typeshed/blob/master/CONTRIBUTING.md#conventions + # in https://github.com/python/typeshed/blob/main/CONTRIBUTING.md#conventions # for more details. exit_ret_type = get_proper_type(exit_ret_type) @@ -4544,11 +4879,11 @@ def make_fake_typeinfo( cdef.info = info info.bases = bases calculate_mro(info) - info.calculate_metaclass_type() + info.metaclass_type = info.calculate_metaclass_type() return cdef, info def intersect_instances( - self, instances: tuple[Instance, Instance], ctx: Context + self, instances: tuple[Instance, Instance], errors: list[tuple[str, str]] ) -> Instance | None: """Try creating an ad-hoc intersection of the given instances. 
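# analyze_range_native_int_type above special-cases range() over mypyc native ints so the
# loop variable keeps the native type. Sketch, assuming the i64 type from mypy_extensions:
from mypy_extensions import i64


def total(n: i64) -> i64:
    s: i64 = 0
    for i in range(n):  # "i" is inferred as i64 rather than plain int
        s += i
    return s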
@@ -4575,6 +4910,17 @@ def intersect_instances( curr_module = self.scope.stack[0] assert isinstance(curr_module, MypyFile) + # First, retry narrowing while allowing promotions (they are disabled by default + # for isinstance() checks, etc). This way we will still type-check branches like + # x: complex = 1 + # if isinstance(x, int): + # ... + left, right = instances + if is_proper_subtype(left, right, ignore_promotions=False): + return left + if is_proper_subtype(right, left, ignore_promotions=False): + return right + def _get_base_classes(instances_: tuple[Instance, Instance]) -> list[Instance]: base_classes_ = [] for inst in instances_: @@ -4615,21 +4961,14 @@ def _make_fake_typeinfo_and_full_name( self.check_multiple_inheritance(info) info.is_intersection = True except MroError: - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "inconsistent method resolution order", ctx - ) + errors.append((pretty_names_list, "inconsistent method resolution order")) return None - if local_errors.has_new_errors(): - if self.should_report_unreachable_issues(): - self.msg.impossible_intersection( - pretty_names_list, "incompatible method signatures", ctx - ) + errors.append((pretty_names_list, "incompatible method signatures")) return None curr_module.names[full_name] = SymbolTableNode(GDEF, info) - return Instance(info, []) + return Instance(info, [], extra_attrs=instances[0].extra_attrs or instances[1].extra_attrs) def intersect_instance_callable(self, typ: Instance, callable_type: CallableType) -> Instance: """Creates a fake type that represents the intersection of an Instance and a CallableType. @@ -4656,7 +4995,7 @@ def intersect_instance_callable(self, typ: Instance, callable_type: CallableType cur_module.names[gen_name] = SymbolTableNode(GDEF, info) - return Instance(info, []) + return Instance(info, [], extra_attrs=typ.extra_attrs) def make_fake_callable(self, typ: Instance) -> Instance: """Produce a new type that makes type Callable with a generic callable type.""" @@ -4789,6 +5128,45 @@ def conditional_callable_type_map( return None, {} + def conditional_types_for_iterable( + self, item_type: Type, iterable_type: Type + ) -> tuple[Type | None, Type | None]: + """ + Narrows the type of `iterable_type` based on the type of `item_type`. + For now, we only support narrowing unions of TypedDicts based on left operand being literal string(s). 
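# The promotion-aware retry added at the top of intersect_instances keeps branches such
# as the following well-typed (this is the example quoted in the hunk's comment):
x: complex = 1
if isinstance(x, int):
    reveal_type(x)  # int -- narrowing now honours the int -> complex promotion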
+ """ + if_types: list[Type] = [] + else_types: list[Type] = [] + + iterable_type = get_proper_type(iterable_type) + if isinstance(iterable_type, UnionType): + possible_iterable_types = get_proper_types(iterable_type.relevant_items()) + else: + possible_iterable_types = [iterable_type] + + item_str_literals = try_getting_str_literals_from_type(item_type) + + for possible_iterable_type in possible_iterable_types: + if item_str_literals and isinstance(possible_iterable_type, TypedDictType): + for key in item_str_literals: + if key in possible_iterable_type.required_keys: + if_types.append(possible_iterable_type) + elif ( + key in possible_iterable_type.items or not possible_iterable_type.is_final + ): + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + else: + else_types.append(possible_iterable_type) + else: + if_types.append(possible_iterable_type) + else_types.append(possible_iterable_type) + + return ( + UnionType.make_union(if_types) if if_types else None, + UnionType.make_union(else_types) if else_types else None, + ) + def _is_truthy_type(self, t: ProperType) -> bool: return ( ( @@ -4796,6 +5174,7 @@ def _is_truthy_type(self, t: ProperType) -> bool: and bool(t.type) and not t.type.has_readable_member("__bool__") and not t.type.has_readable_member("__len__") + and t.type.fullname != "builtins.object" ) or isinstance(t, FunctionLike) or ( @@ -4831,6 +5210,14 @@ def format_expr_type() -> str: self.fail(message_registry.FUNCTION_ALWAYS_TRUE.format(format_type(t)), expr) elif isinstance(t, UnionType): self.fail(message_registry.TYPE_ALWAYS_TRUE_UNIONTYPE.format(format_expr_type()), expr) + elif isinstance(t, Instance) and t.type.fullname == "typing.Iterable": + _, info = self.make_fake_typeinfo("typing", "Collection", "Collection", []) + self.fail( + message_registry.ITERABLE_ALWAYS_TRUE.format( + format_expr_type(), format_type(Instance(info, t.args)) + ), + expr, + ) else: self.fail(message_registry.TYPE_ALWAYS_TRUE.format(format_expr_type()), expr) @@ -4960,12 +5347,34 @@ def find_isinstance_check_helper(self, node: Expression) -> tuple[TypeMap, TypeM if literal(expr) == LITERAL_TYPE: vartype = self.lookup_type(expr) return self.conditional_callable_type_map(expr, vartype) + elif refers_to_fullname(node.callee, "builtins.hasattr"): + if len(node.args) != 2: # the error will be reported elsewhere + return {}, {} + attr = try_getting_str_literals(node.args[1], self.lookup_type(node.args[1])) + if literal(expr) == LITERAL_TYPE and attr and len(attr) == 1: + return self.hasattr_type_maps(expr, self.lookup_type(expr), attr[0]) elif isinstance(node.callee, RefExpr): if node.callee.type_guard is not None: - # TODO: Follow keyword args or *args, **kwargs + # TODO: Follow *args, **kwargs if node.arg_kinds[0] != nodes.ARG_POS: - self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node) - return {}, {} + # the first argument might be used as a kwarg + called_type = get_proper_type(self.lookup_type(node.callee)) + assert isinstance(called_type, (CallableType, Overloaded)) + + # *assuming* the overloaded function is correct, there's a couple cases: + # 1) The first argument has different names, but is pos-only. We don't + # care about this case, the argument must be passed positionally. + # 2) The first argument allows keyword reference, therefore must be the + # same between overloads. 
+ name = called_type.items[0].arg_names[0] + + if name in node.arg_names: + idx = node.arg_names.index(name) + # we want the idx-th variable to be narrowed + expr = collapse_walrus(node.args[idx]) + else: + self.fail(message_registry.TYPE_GUARD_POS_ARG_REQUIRED, node) + return {}, {} if literal(expr) == LITERAL_TYPE: # Note: we wrap the target type, so that we can special case later. # Namely, for isinstance() we use a normal meet, while TypeGuard is @@ -5090,28 +5499,42 @@ def has_no_custom_eq_checks(t: Type) -> bool: elif operator in {"in", "not in"}: assert len(expr_indices) == 2 left_index, right_index = expr_indices - if left_index not in narrowable_operand_index_to_hash: - continue - item_type = operand_types[left_index] - collection_type = operand_types[right_index] + iterable_type = operand_types[right_index] - # We only try and narrow away 'None' for now - if not is_optional(item_type): - continue + if_map, else_map = {}, {} + + if left_index in narrowable_operand_index_to_hash: + # We only try and narrow away 'None' for now + if is_optional(item_type): + collection_item_type = get_proper_type( + builtin_item_type(iterable_type) + ) + if ( + collection_item_type is not None + and not is_optional(collection_item_type) + and not ( + isinstance(collection_item_type, Instance) + and collection_item_type.type.fullname == "builtins.object" + ) + and is_overlapping_erased_types(item_type, collection_item_type) + ): + if_map[operands[left_index]] = remove_optional(item_type) + + if right_index in narrowable_operand_index_to_hash: + if_type, else_type = self.conditional_types_for_iterable( + item_type, iterable_type + ) + expr = operands[right_index] + if if_type is None: + if_map = None + else: + if_map[expr] = if_type + if else_type is None: + else_map = None + else: + else_map[expr] = else_type - collection_item_type = get_proper_type(builtin_item_type(collection_type)) - if collection_item_type is None or is_optional(collection_item_type): - continue - if ( - isinstance(collection_item_type, Instance) - and collection_item_type.type.fullname == "builtins.object" - ): - continue - if is_overlapping_erased_types(item_type, collection_item_type): - if_map, else_map = {operands[left_index]: remove_optional(item_type)}, {} - else: - continue else: if_map = {} else_map = {} @@ -5243,7 +5666,7 @@ def refine_parent_types(self, expr: Expression, expr_type: Type) -> Mapping[Expr # and create function that will try replaying the same lookup # operation against arbitrary types. if isinstance(expr, MemberExpr): - parent_expr = expr.expr + parent_expr = collapse_walrus(expr.expr) parent_type = self.lookup_type_or_none(parent_expr) member_name = expr.name @@ -5267,7 +5690,7 @@ def replay_lookup(new_parent_type: ProperType) -> Type | None: return member_type elif isinstance(expr, IndexExpr): - parent_expr = expr.base + parent_expr = collapse_walrus(expr.base) parent_type = self.lookup_type_or_none(parent_expr) index_type = self.lookup_type_or_none(expr.index) @@ -5509,16 +5932,47 @@ def refine_away_none_in_comparison( # # Helpers # + @overload + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: str, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + code: ErrorCode | None = None, + outer_context: Context | None = None, + ) -> bool: + ... 
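# The argument-name lookup added above lets a TypeGuard narrow its target even when the
# guarded value is passed as a keyword argument. Sketch:
from typing import List
from typing_extensions import TypeGuard


def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
    return all(isinstance(x, str) for x in val)


def handle(items: List[object]) -> None:
    if is_str_list(val=items):
        reveal_type(items)  # List[str] -- keyword calls are now narrowed as well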
+ + @overload + def check_subtype( + self, + subtype: Type, + supertype: Type, + context: Context, + msg: ErrorMessage, + subtype_label: str | None = None, + supertype_label: str | None = None, + *, + notes: list[str] | None = None, + outer_context: Context | None = None, + ) -> bool: + ... def check_subtype( self, subtype: Type, supertype: Type, context: Context, - msg: str | ErrorMessage = message_registry.INCOMPATIBLE_TYPES, + msg: str | ErrorMessage, subtype_label: str | None = None, supertype_label: str | None = None, *, + notes: list[str] | None = None, code: ErrorCode | None = None, outer_context: Context | None = None, ) -> bool: @@ -5526,24 +5980,24 @@ def check_subtype( if is_subtype(subtype, supertype, options=self.options): return True - if isinstance(msg, ErrorMessage): - msg_text = msg.value - code = msg.code - else: - msg_text = msg + if isinstance(msg, str): + msg = ErrorMessage(msg, code=code) + + if self.msg.prefer_simple_messages(): + self.fail(msg, context) # Fast path -- skip all fancy logic + return False + orig_subtype = subtype subtype = get_proper_type(subtype) orig_supertype = supertype supertype = get_proper_type(supertype) if self.msg.try_report_long_tuple_assignment_error( - subtype, supertype, context, msg_text, subtype_label, supertype_label, code=code + subtype, supertype, context, msg, subtype_label, supertype_label ): return False - if self.should_suppress_optional_error([subtype]): - return False extra_info: list[str] = [] note_msg = "" - notes: list[str] = [] + notes = notes or [] if subtype_label is not None or supertype_label is not None: subtype_str, supertype_str = format_type_distinctly(orig_subtype, orig_supertype) if subtype_label is not None: @@ -5554,31 +6008,32 @@ def check_subtype( outer_context or context, subtype, supertype, supertype_str ) if isinstance(subtype, Instance) and isinstance(supertype, Instance): - notes = append_invariance_notes([], subtype, supertype) + notes = append_invariance_notes(notes, subtype, supertype) if extra_info: - msg_text += " (" + ", ".join(extra_info) + ")" + msg = msg.with_additional_msg(" (" + ", ".join(extra_info) + ")") - self.fail(ErrorMessage(msg_text, code=code), context) + self.fail(msg, context) for note in notes: - self.msg.note(note, context, code=code) + self.msg.note(note, context, code=msg.code) if note_msg: - self.note(note_msg, context, code=code) - self.msg.maybe_note_concatenate_pos_args(subtype, supertype, context, code=code) + self.note(note_msg, context, code=msg.code) + self.msg.maybe_note_concatenate_pos_args(subtype, supertype, context, code=msg.code) if ( isinstance(supertype, Instance) and supertype.type.is_protocol - and isinstance(subtype, (Instance, TupleType, TypedDictType)) + and isinstance(subtype, (CallableType, Instance, TupleType, TypedDictType)) ): - self.msg.report_protocol_problems(subtype, supertype, context, code=code) + self.msg.report_protocol_problems(subtype, supertype, context, code=msg.code) if isinstance(supertype, CallableType) and isinstance(subtype, Instance): call = find_member("__call__", subtype, subtype, is_operator=True) if call: - self.msg.note_call(subtype, call, context, code=code) + self.msg.note_call(subtype, call, context, code=msg.code) if isinstance(subtype, (CallableType, Overloaded)) and isinstance(supertype, Instance): - if supertype.type.is_protocol and supertype.type.protocol_members == ["__call__"]: + if supertype.type.is_protocol and "__call__" in supertype.type.protocol_members: call = find_member("__call__", supertype, subtype, 
is_operator=True) assert call is not None - self.msg.note_call(supertype, call, context, code=code) + if not is_subtype(subtype, call, options=self.options): + self.msg.note_call(supertype, call, context, code=msg.code) self.check_possible_missing_await(subtype, supertype, context) return False @@ -5623,7 +6078,9 @@ def check_possible_missing_await( aw_type = self.get_precise_awaitable_type(subtype, local_errors) if aw_type is None: return - if not self.check_subtype(aw_type, supertype, context): + if not self.check_subtype( + aw_type, supertype, context, msg=message_registry.INCOMPATIBLE_TYPES + ): return self.msg.possible_missing_await(context) @@ -5640,9 +6097,6 @@ def contains_none(self, t: Type) -> bool: ) ) - def should_suppress_optional_error(self, related_types: list[Type]) -> bool: - return self.suppress_none_errors and any(self.contains_none(t) for t in related_types) - def named_type(self, name: str) -> Instance: """Return an instance type with given name and implicit Any type args. @@ -5652,7 +6106,7 @@ def named_type(self, name: str) -> Instance: sym = self.lookup_qualified(name) node = sym.node if isinstance(node, TypeAlias): - assert isinstance(node.target, Instance) # type: ignore + assert isinstance(node.target, Instance) # type: ignore[misc] node = node.target.type assert isinstance(node, TypeInfo) any_type = AnyType(TypeOfAny.from_omitted_generics) @@ -5689,10 +6143,7 @@ def store_type(self, node: Expression, typ: Type) -> None: self._type_maps[-1][node] = typ def has_type(self, node: Expression) -> bool: - for m in reversed(self._type_maps): - if node in m: - return True - return False + return any(node in m for m in reversed(self._type_maps)) def lookup_type_or_none(self, node: Expression) -> Type | None: for m in reversed(self._type_maps): @@ -5759,11 +6210,11 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: last = parts[-1] if last in n.names: return n.names[last] - elif len(parts) == 2 and parts[0] == "builtins": - fullname = "builtins." + last + elif len(parts) == 2 and parts[0] in ("builtins", "typing"): + fullname = ".".join(parts) if fullname in SUGGESTED_TEST_FIXTURES: - suggestion = ", e.g. add '[builtins fixtures/{}]' to your test".format( - SUGGESTED_TEST_FIXTURES[fullname] + suggestion = ", e.g. add '[{} fixtures/{}]' to your test".format( + parts[0], SUGGESTED_TEST_FIXTURES[fullname] ) else: suggestion = "" @@ -5829,7 +6280,9 @@ def enter_partial_types( self.msg.need_annotation_for_var(var, context, self.options.python_version) self.partial_reported.add(var) if var.type: - var.type = self.fixup_partial_type(var.type) + fixed = fixup_partial_type(var.type) + var.invalid_partial_type = fixed != var.type + var.type = fixed def handle_partial_var_type( self, typ: PartialType, is_lvalue: bool, node: Var, context: Context @@ -5858,29 +6311,14 @@ def handle_partial_var_type( else: # Defer the node -- we might get a better type in the outer scope self.handle_cannot_determine_type(node.name, context) - return self.fixup_partial_type(typ) - - def fixup_partial_type(self, typ: Type) -> Type: - """Convert a partial type that we couldn't resolve into something concrete. - - This means, for None we make it Optional[Any], and for anything else we - fill in all of the type arguments with Any. 
- """ - if not isinstance(typ, PartialType): - return typ - if typ.type is None: - return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) - else: - return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + return fixup_partial_type(typ) def is_defined_in_base_class(self, var: Var) -> bool: - if var.info: - for base in var.info.mro[1:]: - if base.get(var.name) is not None: - return True - if var.info.fallback_to_any: - return True - return False + if not var.info: + return False + return var.info.fallback_to_any or any( + base.get(var.name) is not None for base in var.info.mro[1:] + ) def find_partial_types(self, var: Var) -> dict[Var, Context] | None: """Look for an active partial type scope containing variable. @@ -5933,9 +6371,17 @@ def fail( self.msg.fail(msg, context, code=code) def note( - self, msg: str, context: Context, offset: int = 0, *, code: ErrorCode | None = None + self, + msg: str | ErrorMessage, + context: Context, + offset: int = 0, + *, + code: ErrorCode | None = None, ) -> None: """Produce a note.""" + if isinstance(msg, ErrorMessage): + self.msg.note(msg.value, context, code=msg.code) + return self.msg.note(msg, context, offset=offset, code=code) def iterable_item_type(self, instance: Instance) -> Type: @@ -5985,7 +6431,7 @@ def infer_issubclass_maps(self, node: CallExpr, expr: Expression) -> tuple[TypeM vartype = UnionType(union_list) elif isinstance(vartype, TypeType): vartype = vartype.item - elif isinstance(vartype, Instance) and vartype.type.fullname == "builtins.type": + elif isinstance(vartype, Instance) and vartype.type.is_metaclass(): vartype = self.named_type("builtins.object") else: # Any other object whose type we don't know precisely @@ -6046,15 +6492,20 @@ def conditional_types_with_intersection( possible_target_types.append(item) out = [] + errors: list[tuple[str, str]] = [] for v in possible_expr_types: if not isinstance(v, Instance): return yes_type, no_type for t in possible_target_types: - intersection = self.intersect_instances((v, t), ctx) + intersection = self.intersect_instances((v, t), errors) if intersection is None: continue out.append(intersection) if len(out) == 0: + # Only report errors if no element in the union worked. 
+ if self.should_report_unreachable_issues(): + for types, reason in errors: + self.msg.impossible_intersection(types, reason, ctx) return UninhabitedType(), expr_type new_yes_type = make_simplified_union(out) return new_yes_type, expr_type @@ -6062,12 +6513,13 @@ def conditional_types_with_intersection( def is_writable_attribute(self, node: Node) -> bool: """Check if an attribute is writable""" if isinstance(node, Var): + if node.is_property and not node.is_settable_property: + return False return True elif isinstance(node, OverloadedFuncDef) and node.is_property: first_item = cast(Decorator, node.items[0]) return first_item.var.is_settable_property - else: - return False + return False def get_isinstance_type(self, expr: Expression) -> list[TypeRange] | None: if isinstance(expr, OpExpr) and expr.op == "|": @@ -6140,8 +6592,104 @@ class Foo(Enum): and member_type.fallback.type == parent_type.type_object() ) + def add_any_attribute_to_type(self, typ: Type, name: str) -> Type: + """Inject an extra attribute with Any type using fallbacks.""" + orig_typ = typ + typ = get_proper_type(typ) + any_type = AnyType(TypeOfAny.unannotated) + if isinstance(typ, Instance): + result = typ.copy_with_extra_attr(name, any_type) + # For instances, we erase the possible module name, so that restrictions + # become anonymous types.ModuleType instances, allowing hasattr() to + # have effect on modules. + assert result.extra_attrs is not None + result.extra_attrs.mod_name = None + return result + if isinstance(typ, TupleType): + fallback = typ.partial_fallback.copy_with_extra_attr(name, any_type) + return typ.copy_modified(fallback=fallback) + if isinstance(typ, CallableType): + fallback = typ.fallback.copy_with_extra_attr(name, any_type) + return typ.copy_modified(fallback=fallback) + if isinstance(typ, TypeType) and isinstance(typ.item, Instance): + return TypeType.make_normalized(self.add_any_attribute_to_type(typ.item, name)) + if isinstance(typ, TypeVarType): + return typ.copy_modified( + upper_bound=self.add_any_attribute_to_type(typ.upper_bound, name), + values=[self.add_any_attribute_to_type(v, name) for v in typ.values], + ) + if isinstance(typ, UnionType): + with_attr, without_attr = self.partition_union_by_attr(typ, name) + return make_simplified_union( + with_attr + [self.add_any_attribute_to_type(typ, name) for typ in without_attr] + ) + return orig_typ + + def hasattr_type_maps( + self, expr: Expression, source_type: Type, name: str + ) -> tuple[TypeMap, TypeMap]: + """Simple support for hasattr() checks. -class CollectArgTypes(TypeTraverserVisitor): + Essentially the logic is following: + * In the if branch, keep types that already has a valid attribute as is, + for other inject an attribute with `Any` type. + * In the else branch, remove types that already have a valid attribute, + while keeping the rest. 
+ """ + if self.has_valid_attribute(source_type, name): + return {expr: source_type}, {} + + source_type = get_proper_type(source_type) + if isinstance(source_type, UnionType): + _, without_attr = self.partition_union_by_attr(source_type, name) + yes_map = {expr: self.add_any_attribute_to_type(source_type, name)} + return yes_map, {expr: make_simplified_union(without_attr)} + + type_with_attr = self.add_any_attribute_to_type(source_type, name) + if type_with_attr != source_type: + return {expr: type_with_attr}, {} + return {}, {} + + def partition_union_by_attr( + self, source_type: UnionType, name: str + ) -> tuple[list[Type], list[Type]]: + with_attr = [] + without_attr = [] + for item in source_type.items: + if self.has_valid_attribute(item, name): + with_attr.append(item) + else: + without_attr.append(item) + return with_attr, without_attr + + def has_valid_attribute(self, typ: Type, name: str) -> bool: + p_typ = get_proper_type(typ) + if isinstance(p_typ, AnyType): + return False + if isinstance(p_typ, Instance) and p_typ.extra_attrs and p_typ.extra_attrs.mod_name: + # Presence of module_symbol_table means this check will skip ModuleType.__getattr__ + module_symbol_table = p_typ.type.names + else: + module_symbol_table = None + with self.msg.filter_errors() as watcher: + analyze_member_access( + name, + typ, + TempNode(AnyType(TypeOfAny.special_form)), + False, + False, + False, + self.msg, + original_type=typ, + chk=self, + # This is not a real attribute lookup so don't mess with deferring nodes. + no_deferral=True, + module_symbol_table=module_symbol_table, + ) + return not watcher.has_new_errors() + + +class CollectArgTypeVarTypes(TypeTraverserVisitor): """Collects the non-nested argument types in a set.""" def __init__(self) -> None: @@ -6193,11 +6741,11 @@ def conditional_types( return proposed_type, default elif not any( type_range.is_upper_bound for type_range in proposed_type_ranges - ) and is_proper_subtype(current_type, proposed_type): + ) and is_proper_subtype(current_type, proposed_type, ignore_promotions=True): # Expression is always of one of the types in proposed_type_ranges return default, UninhabitedType() elif not is_overlapping_types( - current_type, proposed_type, prohibit_none_typevar_overlap=True + current_type, proposed_type, prohibit_none_typevar_overlap=True, ignore_promotions=True ): # Expression is never of any type in proposed_type_ranges return UninhabitedType(), default @@ -6286,6 +6834,8 @@ def builtin_item_type(tp: Type) -> Type | None: "builtins.dict", "builtins.set", "builtins.frozenset", + "_collections_abc.dict_keys", + "typing.KeysView", ]: if not tp.args: # TODO: fix tuple in lib-stub/builtins.pyi (it should be generic). 
@@ -6481,7 +7031,7 @@ def is_unsafe_overlapping_overload_signatures( return is_callable_compatible( signature, other, - is_compat=is_overlapping_types_no_promote, + is_compat=is_overlapping_types_no_promote_no_uninhabited, is_compat_return=lambda l, r: not is_subtype_no_promote(l, r), ignore_return=False, check_args_covariantly=True, @@ -6489,7 +7039,7 @@ def is_unsafe_overlapping_overload_signatures( ) or is_callable_compatible( other, signature, - is_compat=is_overlapping_types_no_promote, + is_compat=is_overlapping_types_no_promote_no_uninhabited, is_compat_return=lambda l, r: not is_subtype_no_promote(r, l), ignore_return=False, check_args_covariantly=False, @@ -6562,7 +7112,6 @@ def overload_can_never_match(signature: CallableType, other: CallableType) -> bo exp_signature = expand_type( signature, {tvar.id: erase_def_to_union_or_bound(tvar) for tvar in signature.variables} ) - assert isinstance(exp_signature, ProperType) assert isinstance(exp_signature, CallableType) return is_callable_compatible( exp_signature, other, is_compat=is_more_precise, ignore_return=True @@ -6612,33 +7161,48 @@ def infer_operator_assignment_method(typ: Type, operator: str) -> tuple[bool, st return False, method -def is_valid_inferred_type(typ: Type) -> bool: - """Is an inferred type valid? +def is_valid_inferred_type(typ: Type, is_lvalue_final: bool = False) -> bool: + """Is an inferred type valid and needs no further refinement? - Examples of invalid types include the None type or List[]. + Examples of invalid types include the None type (when we are not assigning + None to a final lvalue) or List[]. When not doing strict Optional checking, all types containing None are invalid. When doing strict Optional checking, only None and types that are incompletely defined (i.e. contain UninhabitedType) are invalid. """ - if isinstance(get_proper_type(typ), (NoneType, UninhabitedType)): - # With strict Optional checking, we *may* eventually infer NoneType when - # the initializer is None, but we only do that if we can't infer a - # specific Optional type. This resolution happens in - # leave_partial_types when we pop a partial types scope. + proper_type = get_proper_type(typ) + if isinstance(proper_type, NoneType): + # If the lvalue is final, we may immediately infer NoneType when the + # initializer is None. + # + # If not, we want to defer making this decision. The final inferred + # type could either be NoneType or an Optional type, depending on + # the context. This resolution happens in leave_partial_types when + # we pop a partial types scope. + return is_lvalue_final + elif isinstance(proper_type, UninhabitedType): return False - return not typ.accept(NothingSeeker()) + return not typ.accept(InvalidInferredTypes()) -class NothingSeeker(TypeQuery[bool]): - """Find any types resulting from failed (ambiguous) type inference.""" +class InvalidInferredTypes(BoolTypeQuery): + """Find type components that are not valid for an inferred type. + + These include type, and any types resulting from failed + (ambiguous) type inference. + """ def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return t.ambiguous + def visit_erased_type(self, t: ErasedType) -> bool: + # This can happen inside a lambda. 
+ return True + class SetNothingToAny(TypeTranslator): """Replace all ambiguous types with Any (to avoid spurious extra errors).""" @@ -6649,7 +7213,7 @@ def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the alias cannot by an ambiguous , so we just + # Target of the alias cannot be an ambiguous , so we just # replace the arguments. return t.copy_modified(args=[a.accept(self) for a in t.args]) @@ -6969,12 +7533,33 @@ def is_static(func: FuncBase | Decorator) -> bool: assert False, f"Unexpected func type: {type(func)}" +def is_property(defn: SymbolNode) -> bool: + if isinstance(defn, Decorator): + return defn.func.is_property + if isinstance(defn, OverloadedFuncDef): + if defn.items and isinstance(defn.items[0], Decorator): + return defn.items[0].func.is_property + return False + + +def get_property_type(t: ProperType) -> ProperType: + if isinstance(t, CallableType): + return get_proper_type(t.ret_type) + if isinstance(t, Overloaded): + return get_proper_type(t.items[0].ret_type) + return t + + def is_subtype_no_promote(left: Type, right: Type) -> bool: return is_subtype(left, right, ignore_promotions=True) -def is_overlapping_types_no_promote(left: Type, right: Type) -> bool: - return is_overlapping_types(left, right, ignore_promotions=True) +def is_overlapping_types_no_promote_no_uninhabited(left: Type, right: Type) -> bool: + # For the purpose of unsafe overload checks we consider list[] and list[int] + # non-overlapping. This is consistent with how we treat list[int] and list[str] as + # non-overlapping, despite [] belongs to both. Also this will prevent false positives + # for failed type inference during unification. + return is_overlapping_types(left, right, ignore_promotions=True, ignore_uninhabited=True) def is_private(node_name: str) -> bool: diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index cb542ee5300b..38b5c2419d95 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -3,6 +3,7 @@ from __future__ import annotations import itertools +import time from contextlib import contextmanager from typing import Callable, ClassVar, Iterator, List, Optional, Sequence, cast from typing_extensions import Final, TypeAlias as _TypeAlias, overload @@ -111,6 +112,7 @@ custom_special_method, erase_to_union_or_bound, false_only, + fixup_partial_type, function_type, is_literal_type_like, make_simplified_union, @@ -127,6 +129,7 @@ CallableType, DeletedType, ErasedType, + ExtraAttrs, FunctionLike, Instance, LiteralType, @@ -137,15 +140,17 @@ ParamSpecType, PartialType, ProperType, - StarType, TupleType, Type, + TypeAliasType, TypedDictType, TypeOfAny, TypeType, + TypeVarTupleType, TypeVarType, UninhabitedType, UnionType, + UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, @@ -153,10 +158,12 @@ is_generic_instance, is_named_instance, is_optional, + is_self_type_like, remove_optional, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevars import fill_typevars +from mypy.typevartuples import find_unpack_in_list from mypy.util import split_module_names from mypy.visitor import ExpressionVisitor @@ -185,6 +192,11 @@ "_collections_abc.dict_keys", "_collections_abc.dict_items", ] +OVERLAPPING_BYTES_ALLOWLIST: Final = { + "builtins.bytes", + "builtins.bytearray", + "builtins.memoryview", +} class TooManyUnions(Exception): @@ -193,10 +205,12 @@ class TooManyUnions(Exception): """ -def allow_fast_container_literal(t: ProperType) -> bool: +def 
allow_fast_container_literal(t: Type) -> bool: + if isinstance(t, TypeAliasType) and t.is_recursive: + return False + t = get_proper_type(t) return isinstance(t, Instance) or ( - isinstance(t, TupleType) - and all(allow_fast_container_literal(get_proper_type(it)) for it in t.items) + isinstance(t, TupleType) and all(allow_fast_container_literal(it) for it in t.items) ) @@ -206,8 +220,8 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" output: set[str] = set() - while isinstance(expr.node, MypyFile) or expr.fullname is not None: - if isinstance(expr.node, MypyFile) and expr.fullname is not None: + while isinstance(expr.node, MypyFile) or expr.fullname: + if isinstance(expr.node, MypyFile) and expr.fullname: # If it's None, something's wrong (perhaps due to an # import cycle or a suppressed error). For now we just # skip it. @@ -218,7 +232,7 @@ def extract_refexpr_names(expr: RefExpr) -> set[str]: if isinstance(expr.node, TypeInfo): # Reference to a class or a nested class output.update(split_module_names(expr.node.module_name)) - elif expr.fullname is not None and "." in expr.fullname and not is_suppressed_import: + elif "." in expr.fullname and not is_suppressed_import: # Everything else (that is not a silenced import within a class) output.add(expr.fullname.rsplit(".", 1)[0]) break @@ -255,11 +269,22 @@ class ExpressionChecker(ExpressionVisitor[Type]): strfrm_checker: StringFormatterChecker plugin: Plugin - def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: + def __init__( + self, + chk: mypy.checker.TypeChecker, + msg: MessageBuilder, + plugin: Plugin, + per_line_checking_time_ns: dict[int, int], + ) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg self.plugin = plugin + self.per_line_checking_time_ns = per_line_checking_time_ns + self.collect_line_checking_stats = chk.options.line_checking_stats is not None + # Are we already visiting some expression? This is used to avoid double counting + # time for nested expressions. + self.in_expression = False self.type_context = [None] # Temporary overrides for expression types. This is currently @@ -318,7 +343,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = self.typeddict_callable(node) else: result = type_object_type(node, self.named_type) - if isinstance(result, CallableType) and isinstance( # type: ignore + if isinstance(result, CallableType) and isinstance( # type: ignore[misc] result.ret_type, Instance ): # We need to set correct line and column @@ -331,13 +356,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: result = erasetype.erase_typevars(result) elif isinstance(node, MypyFile): # Reference to a module object. - try: - result = self.named_type("types.ModuleType") - except KeyError: - # In test cases might 'types' may not be available. - # Fall back to a dummy 'object' type instead to - # avoid a crash. - result = self.named_type("builtins.object") + result = self.module_type(node) elif isinstance(node, Decorator): result = self.analyze_var_ref(node.var, e) elif isinstance(node, TypeAlias): @@ -345,7 +364,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: # Note that we suppress bogus errors for alias redefinitions, # they are already reported in semanal.py. 
result = self.alias_type_in_runtime_context( - node, node.no_args, e, alias_definition=e.is_alias_rvalue or lvalue + node, ctx=e, alias_definition=e.is_alias_rvalue or lvalue ) elif isinstance(node, (TypeVarExpr, ParamSpecExpr)): result = self.object_type() @@ -373,6 +392,31 @@ def analyze_var_ref(self, var: Var, context: Context) -> Type: # Implicit 'Any' type. return AnyType(TypeOfAny.special_form) + def module_type(self, node: MypyFile) -> Instance: + try: + result = self.named_type("types.ModuleType") + except KeyError: + # In test cases might 'types' may not be available. + # Fall back to a dummy 'object' type instead to + # avoid a crash. + result = self.named_type("builtins.object") + module_attrs = {} + immutable = set() + for name, n in node.names.items(): + if not n.module_public: + continue + if isinstance(n.node, Var) and n.node.is_final: + immutable.add(name) + typ = self.chk.determine_type_of_member(n) + if typ: + module_attrs[name] = typ + else: + # TODO: what to do about nested module references? + # They are non-trivial because there may be import cycles. + module_attrs[name] = AnyType(TypeOfAny.special_form) + result.extra_attrs = ExtraAttrs(module_attrs, immutable, node.fullname) + return result + def visit_call_expr(self, e: CallExpr, allow_none_return: bool = False) -> Type: """Type check a call expression.""" if e.analyzed: @@ -486,7 +530,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # There are two special cases where plugins might act: # * A "static" reference/alias to a class or function; # get_function_hook() will be invoked for these. - fullname = e.callee.fullname + fullname = e.callee.fullname or None if isinstance(e.callee.node, TypeAlias): target = get_proper_type(e.callee.node.target) if isinstance(target, Instance): @@ -496,7 +540,7 @@ def visit_call_expr_inner(self, e: CallExpr, allow_none_return: bool = False) -> # get_method_hook() and get_method_signature_hook() will # be invoked for these. 
if ( - fullname is None + not fullname and isinstance(e.callee, MemberExpr) and self.chk.has_type(e.callee.expr) ): @@ -565,7 +609,7 @@ def method_fullname(self, object_type: Type, method_name: str) -> str | None: elif isinstance(object_type, TupleType): type_name = tuple_fallback(object_type).type.fullname - if type_name is not None: + if type_name: return f"{type_name}.{method_name}" else: return None @@ -684,7 +728,11 @@ def validate_typeddict_kwargs(self, kwargs: DictExpr) -> dict[str, Expression] | literal_value = values[0] if literal_value is None: key_context = item_name_expr or item_arg - self.chk.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, key_context) + self.chk.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + key_context, + code=codes.LITERAL_REQ, + ) return None else: item_names.append(literal_value) @@ -750,17 +798,21 @@ def check_typeddict_call_with_kwargs( context: Context, orig_callee: Type | None, ) -> Type: - if not (callee.required_keys <= set(kwargs.keys()) <= set(callee.items.keys())): + actual_keys = kwargs.keys() + if not (callee.required_keys <= actual_keys <= callee.items.keys()): expected_keys = [ key for key in callee.items.keys() - if key in callee.required_keys or key in kwargs.keys() + if key in callee.required_keys or key in actual_keys ] - actual_keys = kwargs.keys() self.msg.unexpected_typeddict_keys( callee, expected_keys=expected_keys, actual_keys=list(actual_keys), context=context ) - return AnyType(TypeOfAny.from_error) + if callee.required_keys > actual_keys: + # found_set is a sub-set of the required_keys + # This means we're missing some keys and as such, we can't + # properly type the object + return AnyType(TypeOfAny.from_error) orig_callee = get_proper_type(orig_callee) if isinstance(orig_callee, CallableType): @@ -800,10 +852,11 @@ def check_typeddict_call_with_kwargs( lvalue_type=item_expected_type, rvalue=item_value, context=item_value, - msg=message_registry.INCOMPATIBLE_TYPES, + msg=ErrorMessage( + message_registry.INCOMPATIBLE_TYPES.value, code=codes.TYPEDDICT_ITEM + ), lvalue_name=f'TypedDict item "{item_name}"', rvalue_name="expression", - code=codes.TYPEDDICT_ITEM, ) return orig_ret_type @@ -854,7 +907,8 @@ def try_infer_partial_type(self, e: CallExpr) -> None: return var, partial_types = ret typ = self.try_infer_partial_value_type_from_call(e, callee.name, var) - if typ is not None: + # Var may be deleted from partial_types in try_infer_partial_value_type_from_call + if typ is not None and var in partial_types: var.type = typ del partial_types[var] elif isinstance(callee.expr, IndexExpr) and isinstance(callee.expr.base, RefExpr): @@ -1173,7 +1227,7 @@ def check_call_expr_with_callee_type( return ret_type def check_union_call_expr(self, e: CallExpr, object_type: UnionType, member: str) -> Type: - """ "Type check calling a member expression where the base type is a union.""" + """Type check calling a member expression where the base type is a union.""" res: list[Type] = [] for typ in object_type.relevant_items(): # Member access errors are already reported when visiting the member expression. @@ -1322,6 +1376,8 @@ def check_callable_call( See the docstring of check_call for more information. """ + # Always unpack **kwargs before checking a call. 
+ callee = callee.with_unpacked_kwargs() if callable_name is None and callee.name: callable_name = callee.name ret_type = get_proper_type(callee.ret_type) @@ -1370,16 +1426,26 @@ def check_callable_call( ) if callee.is_generic(): - need_refresh = any(isinstance(v, ParamSpecType) for v in callee.variables) + need_refresh = any( + isinstance(v, (ParamSpecType, TypeVarTupleType)) for v in callee.variables + ) callee = freshen_function_type_vars(callee) callee = self.infer_function_type_arguments_using_context(callee, context) + if need_refresh: + # Argument kinds etc. may have changed due to + # ParamSpec or TypeVarTuple variables being replaced with an arbitrary + # number of arguments; recalculate actual-to-formal map + formal_to_actual = map_actuals_to_formals( + arg_kinds, + arg_names, + callee.arg_kinds, + callee.arg_names, + lambda i: self.accept(args[i]), + ) callee = self.infer_function_type_arguments( callee, args, arg_kinds, formal_to_actual, context ) if need_refresh: - # Argument kinds etc. may have changed due to - # ParamSpec variables being replaced with an arbitrary - # number of arguments; recalculate actual-to-formal map formal_to_actual = map_actuals_to_formals( arg_kinds, arg_names, @@ -1482,7 +1548,7 @@ def analyze_type_type_callee(self, item: ProperType, context: Context) -> Type: res = type_object_type(item.type, self.named_type) if isinstance(res, CallableType): res = res.copy_modified(from_type_type=True) - expanded = get_proper_type(expand_type_by_instance(res, item)) + expanded = expand_type_by_instance(res, item) if isinstance(expanded, CallableType): # Callee of the form Type[...] should never be generic, only # proper class objects can be. @@ -1531,21 +1597,21 @@ def infer_arg_types_in_empty_context(self, args: list[Expression]) -> list[Type] res.append(arg_type) return res - @contextmanager - def allow_unions(self, type_context: Type) -> Iterator[None]: - # This is a hack to better support inference for recursive types. - # When the outer context for a function call is known to be recursive, - # we solve type constraints inferred from arguments using unions instead - # of joins. This is a bit arbitrary, but in practice it works for most - # cases. A cleaner alternative would be to switch to single bin type - # inference, but this is a lot of work. - old = TypeState.infer_unions + def infer_more_unions_for_recursive_type(self, type_context: Type) -> bool: + """Adjust type inference of unions if type context has a recursive type. + + Return the old state. The caller must assign it to type_state.infer_unions + afterwards. + + This is a hack to better support inference for recursive types. + + Note: This is performance-sensitive and must not be a context manager + until mypyc supports them better. + """ + old = type_state.infer_unions if has_recursive_types(type_context): - TypeState.infer_unions = True - try: - yield - finally: - TypeState.infer_unions = old + type_state.infer_unions = True + return old def infer_arg_types_in_context( self, @@ -1566,8 +1632,16 @@ def infer_arg_types_in_context( for i, actuals in enumerate(formal_to_actual): for ai in actuals: if not arg_kinds[ai].is_star(): - with self.allow_unions(callee.arg_types[i]): - res[ai] = self.accept(args[ai], callee.arg_types[i]) + arg_type = callee.arg_types[i] + # When the outer context for a function call is known to be recursive, + # we solve type constraints inferred from arguments using unions instead + # of joins. This is a bit arbitrary, but in practice it works for most + # cases. 
A cleaner alternative would be to switch to single bin type + # inference, but this is a lot of work. + old = self.infer_more_unions_for_recursive_type(arg_type) + res[ai] = self.accept(args[ai], arg_type) + # We need to manually restore union inference state, ugh. + type_state.infer_unions = old # Fill in the rest of the argument types. for i, t in enumerate(res): @@ -1968,11 +2042,84 @@ def check_argument_types( # Keep track of consumed tuple *arg items. mapper = ArgTypeExpander(self.argument_infer_context()) for i, actuals in enumerate(formal_to_actual): - for actual in actuals: - actual_type = arg_types[actual] + orig_callee_arg_type = get_proper_type(callee.arg_types[i]) + + # Checking the case that we have more than one item but the first argument + # is an unpack, so this would be something like: + # [Tuple[Unpack[Ts]], int] + # + # In this case we have to check everything together, we do this by re-unifying + # the suffices to the tuple, e.g. a single actual like + # Tuple[Unpack[Ts], int] + expanded_tuple = False + if len(actuals) > 1: + first_actual_arg_type = get_proper_type(arg_types[actuals[0]]) + if ( + isinstance(first_actual_arg_type, TupleType) + and len(first_actual_arg_type.items) == 1 + and isinstance(get_proper_type(first_actual_arg_type.items[0]), UnpackType) + ): + # TODO: use walrus operator + actual_types = [first_actual_arg_type.items[0]] + [ + arg_types[a] for a in actuals[1:] + ] + actual_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * (len(actuals) - 1) + + assert isinstance(orig_callee_arg_type, TupleType) + assert orig_callee_arg_type.items + callee_arg_types = orig_callee_arg_type.items + callee_arg_kinds = [nodes.ARG_STAR] + [nodes.ARG_POS] * ( + len(orig_callee_arg_type.items) - 1 + ) + expanded_tuple = True + + if not expanded_tuple: + actual_types = [arg_types[a] for a in actuals] + actual_kinds = [arg_kinds[a] for a in actuals] + if isinstance(orig_callee_arg_type, UnpackType): + unpacked_type = get_proper_type(orig_callee_arg_type.type) + if isinstance(unpacked_type, TupleType): + inner_unpack_index = find_unpack_in_list(unpacked_type.items) + if inner_unpack_index is None: + callee_arg_types = unpacked_type.items + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + inner_unpack = get_proper_type(unpacked_type.items[inner_unpack_index]) + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + # We assume heterogenous tuples are desugared earlier + assert isinstance(inner_unpacked_type, Instance) + assert inner_unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = ( + unpacked_type.items[:inner_unpack_index] + + [inner_unpacked_type.args[0]] + * (len(actuals) - len(unpacked_type.items) + 1) + + unpacked_type.items[inner_unpack_index + 1 :] + ) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + assert isinstance(unpacked_type, Instance) + assert unpacked_type.type.fullname == "builtins.tuple" + callee_arg_types = [unpacked_type.args[0]] * len(actuals) + callee_arg_kinds = [ARG_POS] * len(actuals) + else: + callee_arg_types = [orig_callee_arg_type] * len(actuals) + callee_arg_kinds = [callee.arg_kinds[i]] * len(actuals) + + assert len(actual_types) == len(actuals) == len(actual_kinds) + + if len(callee_arg_types) != len(actual_types): + # TODO: Improve error message + self.chk.fail("Invalid number of arguments", context) + continue + + assert len(callee_arg_types) == len(actual_types) + assert len(callee_arg_types) == len(callee_arg_kinds) + for actual, actual_type, actual_kind, 
callee_arg_type, callee_arg_kind in zip( + actuals, actual_types, actual_kinds, callee_arg_types, callee_arg_kinds + ): if actual_type is None: continue # Some kind of error was already reported. - actual_kind = arg_kinds[actual] # Check that a *arg is valid as varargs. if actual_kind == nodes.ARG_STAR and not self.is_valid_var_arg(actual_type): self.msg.invalid_var_arg(actual_type, context) @@ -1982,13 +2129,13 @@ def check_argument_types( is_mapping = is_subtype(actual_type, self.chk.named_type("typing.Mapping")) self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context) expanded_actual = mapper.expand_actual_type( - actual_type, actual_kind, callee.arg_names[i], callee.arg_kinds[i] + actual_type, actual_kind, callee.arg_names[i], callee_arg_kind ) check_arg( expanded_actual, actual_type, - arg_kinds[actual], - callee.arg_types[i], + actual_kind, + callee_arg_type, actual + 1, i + 1, callee, @@ -2029,8 +2176,6 @@ def check_arg( ): self.msg.concrete_only_call(callee_type, context) elif not is_subtype(caller_type, callee_type, options=self.chk.options): - if self.chk.should_suppress_optional_error([caller_type, callee_type]): - return code = self.msg.incompatible_argument( n, m, @@ -2044,7 +2189,8 @@ def check_arg( self.msg.incompatible_argument_note( original_caller_type, callee_type, context, code=code ) - self.chk.check_possible_missing_await(caller_type, callee_type, context) + if not self.msg.prefer_simple_messages(): + self.chk.check_possible_missing_await(caller_type, callee_type, context) def check_overload_call( self, @@ -2057,6 +2203,8 @@ def check_overload_call( context: Context, ) -> tuple[Type, Type]: """Checks a call to an overloaded function.""" + # Normalize unpacked kwargs before checking the call. + callee = callee.with_unpacked_kwargs() arg_types = self.infer_arg_types_in_empty_context(args) # Step 1: Filter call targets to remove ones where the argument counts don't match plausible_targets = self.plausible_overload_call_targets( @@ -2097,7 +2245,7 @@ def check_overload_call( # we don't want to introduce internal inconsistencies. unioned_result = ( make_simplified_union(list(returns), context.line, context.column), - self.combine_function_signatures(inferred_types), + self.combine_function_signatures(get_proper_types(inferred_types)), ) # Step 3: We try checking each branch one-by-one. @@ -2150,13 +2298,11 @@ def check_overload_call( else: # There was no plausible match: give up target = AnyType(TypeOfAny.from_error) - - if not self.chk.should_suppress_optional_error(arg_types): - if not is_operator_method(callable_name): - code = None - else: - code = codes.OPERATOR - self.msg.no_variant_matches_arguments(callee, arg_types, context, code=code) + if not is_operator_method(callable_name): + code = None + else: + code = codes.OPERATOR + self.msg.no_variant_matches_arguments(callee, arg_types, context, code=code) result = self.check_call( target, @@ -2440,7 +2586,7 @@ def type_overrides_set( for expr in exprs: del self.type_overrides[expr] - def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | CallableType: + def combine_function_signatures(self, types: list[ProperType]) -> AnyType | CallableType: """Accepts a list of function signatures and attempts to combine them together into a new CallableType consisting of the union of all of the given arguments and return types. @@ -2448,10 +2594,9 @@ def combine_function_signatures(self, types: Sequence[Type]) -> AnyType | Callab an ambiguity because of Any in arguments). 
""" assert types, "Trying to merge no callables" - types = get_proper_types(types) if not all(isinstance(c, CallableType) for c in types): return AnyType(TypeOfAny.special_form) - callables = cast(Sequence[CallableType], types) + callables = cast("list[CallableType]", types) if len(callables) == 1: return callables[0] @@ -2638,6 +2783,10 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type if isinstance(base, RefExpr) and isinstance(base.node, MypyFile): module_symbol_table = base.node.names + if isinstance(base, RefExpr) and isinstance(base.node, Var): + is_self = base.node.is_self + else: + is_self = False member_type = analyze_member_access( e.name, @@ -2651,6 +2800,7 @@ def analyze_ordinary_member_access(self, e: MemberExpr, is_lvalue: bool) -> Type chk=self.chk, in_literal_context=self.is_literal_context(), module_symbol_table=module_symbol_table, + is_self=is_self, ) return member_type @@ -2737,6 +2887,9 @@ def visit_ellipsis(self, e: EllipsisExpr) -> Type: def visit_op_expr(self, e: OpExpr) -> Type: """Type check a binary operator expression.""" + if e.analyzed: + # It's actually a type expression X | Y. + return self.accept(e.analyzed) if e.op == "and" or e.op == "or": return self.check_boolean_op(e, e) if e.op == "*" and isinstance(e.left, ListExpr): @@ -2774,68 +2927,108 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: That is, 'a < b > c == d' is check as 'a < b and b > c and c == d' """ result: Type | None = None - sub_result: Type | None = None + sub_result: Type # Check each consecutive operand pair and their operator for left, right, operator in zip(e.operands, e.operands[1:], e.operators): left_type = self.accept(left) - method_type: mypy.types.Type | None = None - if operator == "in" or operator == "not in": + # This case covers both iterables and containers, which have different meanings. + # For a container, the in operator calls the __contains__ method. + # For an iterable, the in operator iterates over the iterable, and compares each item one-by-one. + # We allow `in` for a union of containers and iterables as long as at least one of them matches the + # type of the left operand, as the operation will simply return False if the union's container/iterator + # type doesn't match the left operand. + # If the right operand has partial type, look it up without triggering # a "Need type annotation ..." message, as it would be noise. right_type = self.find_partial_type_ref_fast_path(right) if right_type is None: right_type = self.accept(right) # Validate the right operand - # Keep track of whether we get type check errors (these won't be reported, they - # are just to verify whether something is valid typing wise). - with self.msg.filter_errors(save_filtered_errors=True) as local_errors: - _, method_type = self.check_method_call_by_name( - method="__contains__", - base_type=right_type, - args=[left], - arg_kinds=[ARG_POS], - context=e, - ) + right_type = get_proper_type(right_type) + item_types: Sequence[Type] = [right_type] + if isinstance(right_type, UnionType): + item_types = list(right_type.relevant_items()) sub_result = self.bool_type() - # Container item type for strict type overlap checks. Note: we need to only - # check for nominal type, because a usual "Unsupported operands for in" - # will be reported for types incompatible with __contains__(). - # See testCustomContainsCheckStrictEquality for an example. 
- cont_type = self.chk.analyze_container_item_type(right_type) - if isinstance(right_type, PartialType): - # We don't really know if this is an error or not, so just shut up. - pass - elif ( - local_errors.has_new_errors() - and - # is_valid_var_arg is True for any Iterable - self.is_valid_var_arg(right_type) - ): - _, itertype = self.chk.analyze_iterable_item_type(right) - method_type = CallableType( - [left_type], - [nodes.ARG_POS], - [None], - self.bool_type(), - self.named_type("builtins.function"), - ) - if not is_subtype(left_type, itertype): - self.msg.unsupported_operand_types("in", left_type, right_type, e) - # Only show dangerous overlap if there are no other errors. - elif ( - not local_errors.has_new_errors() - and cont_type - and self.dangerous_comparison( - left_type, cont_type, original_container=right_type - ) - ): - self.msg.dangerous_comparison(left_type, cont_type, "container", e) - else: - self.msg.add_errors(local_errors.filtered_errors()) + + container_types: list[Type] = [] + iterable_types: list[Type] = [] + failed_out = False + encountered_partial_type = False + + for item_type in item_types: + # Keep track of whether we get type check errors (these won't be reported, they + # are just to verify whether something is valid typing wise). + with self.msg.filter_errors(save_filtered_errors=True) as container_errors: + _, method_type = self.check_method_call_by_name( + method="__contains__", + base_type=item_type, + args=[left], + arg_kinds=[ARG_POS], + context=e, + original_type=right_type, + ) + # Container item type for strict type overlap checks. Note: we need to only + # check for nominal type, because a usual "Unsupported operands for in" + # will be reported for types incompatible with __contains__(). + # See testCustomContainsCheckStrictEquality for an example. + cont_type = self.chk.analyze_container_item_type(item_type) + + if isinstance(item_type, PartialType): + # We don't really know if this is an error or not, so just shut up. 
+ encountered_partial_type = True + pass + elif ( + container_errors.has_new_errors() + and + # is_valid_var_arg is True for any Iterable + self.is_valid_var_arg(item_type) + ): + # it's not a container, but it is an iterable + with self.msg.filter_errors(save_filtered_errors=True) as iterable_errors: + _, itertype = self.chk.analyze_iterable_item_type_without_expression( + item_type, e + ) + if iterable_errors.has_new_errors(): + self.msg.add_errors(iterable_errors.filtered_errors()) + failed_out = True + else: + method_type = CallableType( + [left_type], + [nodes.ARG_POS], + [None], + self.bool_type(), + self.named_type("builtins.function"), + ) + e.method_types.append(method_type) + iterable_types.append(itertype) + elif not container_errors.has_new_errors() and cont_type: + container_types.append(cont_type) + e.method_types.append(method_type) + else: + self.msg.add_errors(container_errors.filtered_errors()) + failed_out = True + + if not encountered_partial_type and not failed_out: + iterable_type = UnionType.make_union(iterable_types) + if not is_subtype(left_type, iterable_type): + if len(container_types) == 0: + self.msg.unsupported_operand_types("in", left_type, right_type, e) + else: + container_type = UnionType.make_union(container_types) + if self.dangerous_comparison( + left_type, + container_type, + original_container=right_type, + prefer_literal=False, + ): + self.msg.dangerous_comparison( + left_type, container_type, "container", e + ) + elif operator in operators.op_methods: method = operators.op_methods[operator] @@ -2843,39 +3036,30 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: sub_result, method_type = self.check_op( method, left_type, right, e, allow_reverse=True ) + e.method_types.append(method_type) # Only show dangerous overlap if there are no other errors. See # testCustomEqCheckStrictEquality for an example. if not w.has_new_errors() and operator in ("==", "!="): right_type = self.accept(right) - # We suppress the error if there is a custom __eq__() method on either - # side. User defined (or even standard library) classes can define this - # to return True for comparisons between non-overlapping types. - if not custom_special_method( - left_type, "__eq__" - ) and not custom_special_method(right_type, "__eq__"): - # Also flag non-overlapping literals in situations like: - # x: Literal['a', 'b'] - # if x == 'c': - # ... 
+ if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible left_type = try_getting_literal(left_type) right_type = try_getting_literal(right_type) - if self.dangerous_comparison(left_type, right_type): - self.msg.dangerous_comparison(left_type, right_type, "equality", e) + self.msg.dangerous_comparison(left_type, right_type, "equality", e) elif operator == "is" or operator == "is not": right_type = self.accept(right) # validate the right operand sub_result = self.bool_type() - left_type = try_getting_literal(left_type) - right_type = try_getting_literal(right_type) if self.dangerous_comparison(left_type, right_type): + # Show the most specific literal types possible + left_type = try_getting_literal(left_type) + right_type = try_getting_literal(right_type) self.msg.dangerous_comparison(left_type, right_type, "identity", e) - method_type = None + e.method_types.append(None) else: raise RuntimeError(f"Unknown comparison operator {operator}") - e.method_types.append(method_type) - # Determine type of boolean-and of result and sub_result if result is None: result = sub_result @@ -2897,12 +3081,17 @@ def find_partial_type_ref_fast_path(self, expr: Expression) -> Type | None: if isinstance(expr.node, Var): result = self.analyze_var_ref(expr.node, expr) if isinstance(result, PartialType) and result.type is not None: - self.chk.store_type(expr, self.chk.fixup_partial_type(result)) + self.chk.store_type(expr, fixup_partial_type(result)) return result return None def dangerous_comparison( - self, left: Type, right: Type, original_container: Type | None = None + self, + left: Type, + right: Type, + original_container: Type | None = None, + *, + prefer_literal: bool = True, ) -> bool: """Check for dangerous non-overlapping comparisons like 42 == 'no'. @@ -2924,6 +3113,20 @@ def dangerous_comparison( left, right = get_proper_types((left, right)) + # We suppress the error if there is a custom __eq__() method on either + # side. User defined (or even standard library) classes can define this + # to return True for comparisons between non-overlapping types. + if custom_special_method(left, "__eq__") or custom_special_method(right, "__eq__"): + return False + + if prefer_literal: + # Also flag non-overlapping literals in situations like: + # x: Literal['a', 'b'] + # if x == 'c': + # ... + left = try_getting_literal(left) + right = try_getting_literal(right) + if self.chk.binder.is_unreachable_warning_suppressed(): # We are inside a function that contains type variables with value restrictions in # its signature. In this case we just suppress all strict-equality checks to avoid @@ -2954,14 +3157,22 @@ def dangerous_comparison( return False if isinstance(left, Instance) and isinstance(right, Instance): # Special case some builtin implementations of AbstractSet. 
+ left_name = left.type.fullname + right_name = right.type.fullname if ( - left.type.fullname in OVERLAPPING_TYPES_ALLOWLIST - and right.type.fullname in OVERLAPPING_TYPES_ALLOWLIST + left_name in OVERLAPPING_TYPES_ALLOWLIST + and right_name in OVERLAPPING_TYPES_ALLOWLIST ): abstract_set = self.chk.lookup_typeinfo("typing.AbstractSet") left = map_instance_to_supertype(left, abstract_set) right = map_instance_to_supertype(right, abstract_set) - return not is_overlapping_types(left.args[0], right.args[0]) + return self.dangerous_comparison(left.args[0], right.args[0]) + elif left_name in ("builtins.list", "builtins.tuple") and right_name == left_name: + return self.dangerous_comparison(left.args[0], right.args[0]) + elif left_name in OVERLAPPING_BYTES_ALLOWLIST and right_name in ( + OVERLAPPING_BYTES_ALLOWLIST + ): + return False if isinstance(left, LiteralType) and isinstance(right, LiteralType): if isinstance(left.value, bool) and isinstance(right.value, bool): # Comparing different booleans is not dangerous. @@ -3155,7 +3366,10 @@ def lookup_definer(typ: Instance, attr_name: str) -> str | None: is_subtype(right_type, left_type) and isinstance(left_type, Instance) and isinstance(right_type, Instance) - and left_type.type.alt_promote is not right_type.type + and not ( + left_type.type.alt_promote is not None + and left_type.type.alt_promote.type is right_type.type + ) and lookup_definer(left_type, op_name) != lookup_definer(right_type, rev_op_name) ): # When we do "A() + B()" where B is a subclass of A, we'll actually try calling @@ -3341,7 +3555,7 @@ def check_op( # we call 'combine_function_signature' instead of just unioning the inferred # callable types. results_final = make_simplified_union(all_results) - inferred_final = self.combine_function_signatures(all_inferred) + inferred_final = self.combine_function_signatures(get_proper_types(all_inferred)) return results_final, inferred_final else: return self.check_method_call_by_name( @@ -3370,7 +3584,8 @@ def check_boolean_op(self, e: OpExpr, context: Context) -> Type: assert e.op in ("and", "or") # Checked by visit_op_expr if e.right_always: - left_map, right_map = None, {} # type: mypy.checker.TypeMap, mypy.checker.TypeMap + left_map: mypy.checker.TypeMap = None + right_map: mypy.checker.TypeMap = {} elif e.right_unreachable: left_map, right_map = {}, None elif e.op == "and": @@ -3621,7 +3836,9 @@ def nonliteral_tuple_index_helper(self, left_type: TupleType, index: Expression) return self.chk.named_generic_type("builtins.tuple", [union]) return union - def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) -> Type: + def visit_typeddict_index_expr( + self, td_type: TypedDictType, index: Expression, setitem: bool = False + ) -> Type: if isinstance(index, StrExpr): key_names = [index.value] else: @@ -3650,7 +3867,7 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) for key_name in key_names: value_type = td_type.items.get(key_name) if value_type is None: - self.msg.typeddict_key_not_found(td_type, key_name, index) + self.msg.typeddict_key_not_found(td_type, key_name, index, setitem) return AnyType(TypeOfAny.from_error) else: value_types.append(value_type) @@ -3701,6 +3918,11 @@ def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type: ) target_type = expr.type if not is_same_type(source_type, target_type): + if not self.chk.in_checked_function(): + self.msg.note( + '"assert_type" expects everything to be "Any" in unchecked functions', + expr.expr, + ) 
self.msg.assert_type_fail(source_type, target_type, expr) return source_type @@ -3738,10 +3960,8 @@ def visit_type_application(self, tapp: TypeApplication) -> Type: There are two different options here, depending on whether expr refers to a type alias or directly to a generic class. In the first case we need - to use a dedicated function typeanal.expand_type_aliases. This - is due to the fact that currently type aliases machinery uses - unbound type variables, while normal generics use bound ones; - see TypeAlias docstring for more details. + to use a dedicated function typeanal.expand_type_alias(). This + is due to some differences in how type arguments are applied and checked. """ if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): # Subscription of a (generic) alias in runtime context, expand the alias. @@ -3783,12 +4003,10 @@ def visit_type_alias_expr(self, alias: TypeAliasExpr) -> Type: both `reveal_type` instances will reveal the same type `def (...) -> builtins.list[Any]`. Note that type variables are implicitly substituted with `Any`. """ - return self.alias_type_in_runtime_context( - alias.node, alias.no_args, alias, alias_definition=True - ) + return self.alias_type_in_runtime_context(alias.node, ctx=alias, alias_definition=True) def alias_type_in_runtime_context( - self, alias: TypeAlias, no_args: bool, ctx: Context, *, alias_definition: bool = False + self, alias: TypeAlias, *, ctx: Context, alias_definition: bool = False ) -> Type: """Get type of a type alias (could be generic) in a runtime expression. @@ -3803,7 +4021,7 @@ class LongName(Generic[T]): ... x = A() y = cast(A, ...) """ - if isinstance(alias.target, Instance) and alias.target.invalid: # type: ignore + if isinstance(alias.target, Instance) and alias.target.invalid: # type: ignore[misc] # An invalid alias, error already has been reported return AnyType(TypeOfAny.from_error) # If this is a generic alias, we set all variables to `Any`. @@ -3820,7 +4038,7 @@ class LongName(Generic[T]): ... # Normally we get a callable type (or overloaded) with .is_type_obj() true # representing the class's constructor tp = type_object_type(item.type, self.named_type) - if no_args: + if alias.no_args: return tp return self.apply_type_arguments_to_callable(tp, item.args, ctx) elif ( @@ -3837,8 +4055,8 @@ class LongName(Generic[T]): ... else: if alias_definition: return AnyType(TypeOfAny.special_form) - # This type is invalid in most runtime contexts, give it an 'object' type. - return self.named_type("builtins.object") + # The _SpecialForm type can be used in some runtime contexts (e.g. it may have __or__). 
+ return self.named_type("typing._SpecialForm") def apply_type_arguments_to_callable( self, tp: Type, args: Sequence[Type], ctx: Context @@ -3854,6 +4072,9 @@ def apply_type_arguments_to_callable( if isinstance(tp, CallableType): if len(tp.variables) != len(args): + if tp.is_type_obj() and tp.type_object().fullname == "builtins.tuple": + # TODO: Specialize the callable for the type arguments + return tp self.msg.incompatible_type_application(len(tp.variables), len(args), ctx) return AnyType(TypeOfAny.from_error) return self.apply_generic_arguments(tp, args, ctx) @@ -4043,6 +4264,17 @@ def fast_dict_type(self, e: DictExpr) -> Type | None: self.resolved_type[e] = dt return dt + def check_typeddict_literal_in_context( + self, e: DictExpr, typeddict_context: TypedDictType + ) -> Type: + orig_ret_type = self.check_typeddict_call_with_dict( + callee=typeddict_context, kwargs=e, context=e, orig_callee=None + ) + ret_type = get_proper_type(orig_ret_type) + if isinstance(ret_type, TypedDictType): + return ret_type.copy_modified() + return typeddict_context.copy_modified() + def visit_dict_expr(self, e: DictExpr) -> Type: """Type check a dict expression. @@ -4052,15 +4284,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: # an error, but returns the TypedDict type that matches the literal it found # that would cause a second error when that TypedDict type is returned upstream # to avoid the second error, we always return TypedDict type that was requested - typeddict_context = self.find_typeddict_context(self.type_context[-1], e) - if typeddict_context: - orig_ret_type = self.check_typeddict_call_with_dict( - callee=typeddict_context, kwargs=e, context=e, orig_callee=None - ) - ret_type = get_proper_type(orig_ret_type) - if isinstance(ret_type, TypedDictType): - return ret_type.copy_modified() - return typeddict_context.copy_modified() + typeddict_contexts = self.find_typeddict_context(self.type_context[-1], e) + if typeddict_contexts: + if len(typeddict_contexts) == 1: + return self.check_typeddict_literal_in_context(e, typeddict_contexts[0]) + # Multiple items union, check if at least one of them matches cleanly. + for typeddict_context in typeddict_contexts: + with self.msg.filter_errors() as err, self.chk.local_type_map() as tmap: + ret_type = self.check_typeddict_literal_in_context(e, typeddict_context) + if err.has_new_errors(): + continue + self.chk.store_types(tmap) + return ret_type + # No item matched without an error, so we can't unambiguously choose the item. + self.msg.typeddict_context_ambiguous(typeddict_contexts, e) # fast path attempt dt = self.fast_dict_type(e) @@ -4126,26 +4363,20 @@ def visit_dict_expr(self, e: DictExpr) -> Type: def find_typeddict_context( self, context: Type | None, dict_expr: DictExpr - ) -> TypedDictType | None: + ) -> list[TypedDictType]: context = get_proper_type(context) if isinstance(context, TypedDictType): - return context + return [context] elif isinstance(context, UnionType): items = [] for item in context.items: - item_context = self.find_typeddict_context(item, dict_expr) - if item_context is not None and self.match_typeddict_call_with_dict( - item_context, dict_expr, dict_expr - ): - items.append(item_context) - if len(items) == 1: - # Only one union item is valid TypedDict for the given dict_expr, so use the - # context as it's unambiguous. 
- return items[0] - if len(items) > 1: - self.msg.typeddict_context_ambiguous(items, dict_expr) + item_contexts = self.find_typeddict_context(item, dict_expr) + for item_context in item_contexts: + if self.match_typeddict_call_with_dict(item_context, dict_expr, dict_expr): + items.append(item_context) + return items # No TypedDict type in context. - return None + return [] def visit_lambda_expr(self, e: LambdaExpr) -> Type: """Type check lambda expression.""" @@ -4208,6 +4439,10 @@ def infer_lambda_type_using_context( callable_ctx = get_proper_type(replace_meta_vars(ctx, ErasedType())) assert isinstance(callable_ctx, CallableType) + # The callable_ctx may have a fallback of builtins.type if the context + # is a constructor -- but this fallback doesn't make sense for lambdas. + callable_ctx = callable_ctx.copy_modified(fallback=self.named_type("builtins.function")) + if callable_ctx.type_guard is not None: # Lambda's return type cannot be treated as a `TypeGuard`, # because it is implicit. And `TypeGuard`s must be explicit. @@ -4263,11 +4498,35 @@ def visit_super_expr(self, e: SuperExpr) -> Type: # The base is the first MRO entry *after* type_info that has a member # with the right name - try: + index = None + if type_info in mro: index = mro.index(type_info) - except ValueError: - self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) - return AnyType(TypeOfAny.from_error) + else: + method = self.chk.scope.top_function() + # Mypy explicitly allows supertype upper bounds (and no upper bound at all) + # for annotating self-types. However, if such an annotation is used for + # checking super() we will still get an error. So to be consistent, we also + # allow such imprecise annotations for use with super(), where we fall back + # to the current class MRO instead. This works only from inside a method. + if method is not None and is_self_type_like( + instance_type, is_classmethod=method.is_class + ): + if e.info and type_info in e.info.mro: + mro = e.info.mro + index = mro.index(type_info) + if index is None: + if ( + instance_info.is_protocol + and instance_info != type_info + and not type_info.is_protocol + ): + # A special case for mixins, in this case super() should point + # directly to the host protocol, this is not safe, since the real MRO + # is not known yet for mixin, but this feature is more like an escape hatch. + index = -1 + else: + self.chk.fail(message_registry.SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1, e) + return AnyType(TypeOfAny.from_error) if len(mro) == index + 1: self.chk.fail(message_registry.TARGET_CLASS_HAS_NO_BASE_CLASS, e) @@ -4559,7 +4818,7 @@ def visit_conditional_expr(self, e: ConditionalExpr, allow_none_return: bool = F # # TODO: Always create a union or at least in more cases? if isinstance(get_proper_type(self.type_context[-1]), UnionType): - res = make_simplified_union([if_type, full_context_else_type]) + res: Type = make_simplified_union([if_type, full_context_else_type]) else: res = join.join_types(if_type, else_type) @@ -4598,7 +4857,14 @@ def accept( applies only to this expression and not any subexpressions. """ if node in self.type_overrides: + # This branch is very fast, there is no point timing it. return self.type_overrides[node] + # We don't use context manager here to get most precise data (and avoid overhead). 
+ record_time = False + if self.collect_line_checking_stats and not self.in_expression: + t0 = time.perf_counter_ns() + self.in_expression = True + record_time = True self.type_context.append(type_context) old_is_callee = self.is_callee self.is_callee = is_callee @@ -4633,9 +4899,13 @@ def accept( self.msg.disallowed_any_type(typ, node) if not self.chk.in_checked_function() or self.chk.current_node_deferred: - return AnyType(TypeOfAny.unannotated) + result: Type = AnyType(TypeOfAny.unannotated) else: - return typ + result = typ + if record_time: + self.per_line_checking_time_ns[node.line] += time.perf_counter_ns() - t0 + self.in_expression = False + return result def named_type(self, name: str) -> Instance: """Return an instance type with type given by the name and no type @@ -4654,6 +4924,7 @@ def is_valid_var_arg(self, typ: Type) -> bool: ) or isinstance(typ, AnyType) or isinstance(typ, ParamSpecType) + or isinstance(typ, UnpackType) ) def is_valid_keyword_var_arg(self, typ: Type) -> bool: @@ -4691,7 +4962,7 @@ def has_member(self, typ: Type, member: str) -> bool: typ = typ.fallback if isinstance(typ, Instance): return typ.type.has_readable_member(member) - if isinstance(typ, CallableType) and typ.is_type_obj(): + if isinstance(typ, FunctionLike) and typ.is_type_obj(): return typ.fallback.type.has_readable_member(member) elif isinstance(typ, AnyType): return True @@ -4897,8 +5168,9 @@ def visit_typeddict_expr(self, e: TypedDictExpr) -> Type: def visit__promote_expr(self, e: PromoteExpr) -> Type: return e.type - def visit_star_expr(self, e: StarExpr) -> StarType: - return StarType(self.accept(e.expr)) + def visit_star_expr(self, e: StarExpr) -> Type: + # TODO: should this ever be called (see e.g. mypyc visitor)? + return self.accept(e.expr) def object_type(self) -> Instance: """Return instance type 'object'.""" @@ -4948,9 +5220,9 @@ def has_any_type(t: Type, ignore_in_type_obj: bool = False) -> bool: return t.accept(HasAnyType(ignore_in_type_obj)) -class HasAnyType(types.TypeQuery[bool]): +class HasAnyType(types.BoolTypeQuery): def __init__(self, ignore_in_type_obj: bool) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) self.ignore_in_type_obj = ignore_in_type_obj def visit_any(self, t: AnyType) -> bool: @@ -5027,7 +5299,7 @@ def replace_callable_return_type(c: CallableType, new_ret_type: Type) -> Callabl return c.copy_modified(ret_type=new_ret_type) -class ArgInferSecondPassQuery(types.TypeQuery[bool]): +class ArgInferSecondPassQuery(types.BoolTypeQuery): """Query whether an argument type should be inferred in the second pass. 
The result is True if the type has a type variable in a callable return @@ -5036,17 +5308,17 @@ class ArgInferSecondPassQuery(types.TypeQuery[bool]): """ def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_callable_type(self, t: CallableType) -> bool: return self.query_types(t.arg_types) or t.accept(HasTypeVarQuery()) -class HasTypeVarQuery(types.TypeQuery[bool]): +class HasTypeVarQuery(types.BoolTypeQuery): """Visitor for querying whether a type has a type variable component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_type_var(self, t: TypeVarType) -> bool: return True @@ -5056,11 +5328,11 @@ def has_erased_component(t: Type | None) -> bool: return t is not None and t.accept(HasErasedComponentsQuery()) -class HasErasedComponentsQuery(types.TypeQuery[bool]): +class HasErasedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an erased component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_erased_type(self, t: ErasedType) -> bool: return True @@ -5070,11 +5342,11 @@ def has_uninhabited_component(t: Type | None) -> bool: return t is not None and t.accept(HasUninhabitedComponentsQuery()) -class HasUninhabitedComponentsQuery(types.TypeQuery[bool]): +class HasUninhabitedComponentsQuery(types.BoolTypeQuery): """Visitor for querying whether a type has an UninhabitedType component.""" def __init__(self) -> None: - super().__init__(any) + super().__init__(types.ANY_STRATEGY) def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return True @@ -5304,7 +5576,7 @@ def type_info_from_type(typ: Type) -> TypeInfo | None: def is_operator_method(fullname: str | None) -> bool: - if fullname is None: + if not fullname: return False short_name = fullname.split(".")[-1] return ( diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 6777c4354a04..a2c580e13446 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -6,7 +6,11 @@ from mypy import meet, message_registry, subtypes from mypy.erasetype import erase_typevars -from mypy.expandtype import expand_type_by_instance, freshen_function_type_vars +from mypy.expandtype import ( + expand_self_type, + expand_type_by_instance, + freshen_all_functions_type_vars, +) from mypy.maptype import map_instance_to_supertype from mypy.messages import MessageBuilder from mypy.nodes import ( @@ -31,13 +35,14 @@ is_final_node, ) from mypy.plugin import AttributeContext -from mypy.typeanal import set_any_tvars from mypy.typeops import ( bind_self, class_callable, erase_to_bound, function_type, + get_type_vars, make_simplified_union, + supported_self_type, tuple_fallback, type_object_type_from_function, ) @@ -64,8 +69,8 @@ TypeVarType, UnionType, get_proper_type, - has_type_vars, ) +from mypy.typetraverser import TypeTraverserVisitor if TYPE_CHECKING: # import for forward declaration only import mypy.checker @@ -90,6 +95,8 @@ def __init__( chk: mypy.checker.TypeChecker, self_type: Type | None, module_symbol_table: SymbolTable | None = None, + no_deferral: bool = False, + is_self: bool = False, ) -> None: self.is_lvalue = is_lvalue self.is_super = is_super @@ -100,6 +107,8 @@ def __init__( self.msg = msg self.chk = chk self.module_symbol_table = module_symbol_table + self.no_deferral = no_deferral + self.is_self = is_self def named_type(self, name: str) -> Instance: return self.chk.named_type(name) @@ -124,6 +133,7 @@ def copy_modified( self.chk, self.self_type, 
self.module_symbol_table, + self.no_deferral, ) if messages is not None: mx.msg = messages @@ -149,6 +159,8 @@ def analyze_member_access( in_literal_context: bool = False, self_type: Type | None = None, module_symbol_table: SymbolTable | None = None, + no_deferral: bool = False, + is_self: bool = False, ) -> Type: """Return the type of attribute 'name' of 'typ'. @@ -183,6 +195,8 @@ def analyze_member_access( chk=chk, self_type=self_type, module_symbol_table=module_symbol_table, + no_deferral=no_deferral, + is_self=is_self, ) result = _analyze_member_access(name, typ, mx, override_info) possible_literal = get_proper_type(result) @@ -232,8 +246,6 @@ def _analyze_member_access( elif isinstance(typ, DeletedType): mx.msg.deleted_as_rvalue(typ, mx.context) return AnyType(TypeOfAny.from_error) - if mx.chk.should_suppress_optional_error([typ]): - return AnyType(TypeOfAny.from_error) return report_missing_attribute(mx.original_type, typ, name, mx) @@ -260,8 +272,9 @@ def report_missing_attribute( override_info: TypeInfo | None = None, ) -> Type: res_type = mx.msg.has_no_attr(original_type, typ, name, mx.context, mx.module_symbol_table) - if may_be_awaitable_attribute(name, typ, mx, override_info): - mx.msg.possible_missing_await(mx.context) + if not mx.msg.prefer_simple_messages(): + if may_be_awaitable_attribute(name, typ, mx, override_info): + mx.msg.possible_missing_await(mx.context) return res_type @@ -294,6 +307,9 @@ def analyze_instance_member_access( # Look up the member. First look up the method dictionary. method = info.get_method(name) if method and not isinstance(method, Decorator): + if mx.is_super: + validate_super_call(method, mx) + if method.is_property: assert isinstance(method, OverloadedFuncDef) first_item = cast(Decorator, method.items[0]) @@ -301,13 +317,13 @@ def analyze_instance_member_access( if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) signature = function_type(method, mx.named_type("builtins.function")) - signature = freshen_function_type_vars(signature) - if name == "__new__": + signature = freshen_all_functions_type_vars(signature) + if name == "__new__" or method.is_static: # __new__ is special and behaves like a static method -- don't strip # the first argument. pass else: - if isinstance(signature, FunctionLike) and name != "__call__": + if name != "__call__": # TODO: use proper treatment of special methods on unions instead # of this hack here and below (i.e. mx.self_type). dispatched_type = meet.meet_types(mx.original_type, typ) @@ -315,15 +331,36 @@ def analyze_instance_member_access( signature, dispatched_type, method.is_class, mx.context, name, mx.msg ) signature = bind_self(signature, mx.self_type, is_classmethod=method.is_class) + # TODO: should we skip these steps for static methods as well? + # Since generic static methods should not be allowed. typ = map_instance_to_supertype(typ, method.info) member_type = expand_type_by_instance(signature, typ) - freeze_type_vars(member_type) + freeze_all_type_vars(member_type) return member_type else: # Not a method. 
return analyze_member_var_access(name, typ, info, mx) +def validate_super_call(node: FuncBase, mx: MemberContext) -> None: + unsafe_super = False + if isinstance(node, FuncDef) and node.is_trivial_body: + unsafe_super = True + impl = node + elif isinstance(node, OverloadedFuncDef): + if node.impl: + impl = node.impl if isinstance(node.impl, FuncDef) else node.impl.func + unsafe_super = impl.is_trivial_body + if unsafe_super: + ret_type = ( + impl.type.ret_type + if isinstance(impl.type, CallableType) + else AnyType(TypeOfAny.unannotated) + ) + if not subtypes.is_subtype(NoneType(), ret_type): + mx.msg.unsafe_super(node.name, node.info.name, mx.context) + + def analyze_type_callable_member_access(name: str, typ: FunctionLike, mx: MemberContext) -> Type: # Class attribute. # TODO super? @@ -425,8 +462,6 @@ def analyze_none_member_access(name: str, typ: NoneType, mx: MemberContext) -> T ret_type=literal_false, fallback=mx.named_type("builtins.function"), ) - elif mx.chk.should_suppress_optional_error([typ]): - return AnyType(TypeOfAny.from_error) else: return _analyze_member_access(name, mx.named_type("builtins.object"), mx) @@ -447,6 +482,8 @@ def analyze_member_var_access( if isinstance(vv, Decorator): # The associated Var node of a decorator contains the type. v = vv.var + if mx.is_super: + validate_super_call(vv.func, mx) if isinstance(vv, TypeInfo): # If the associated variable is a TypeInfo synthesize a Var node for @@ -455,14 +492,16 @@ def analyze_member_var_access( v = Var(name, type=type_object_type(vv, mx.named_type)) v.info = info - if isinstance(vv, TypeAlias) and isinstance(get_proper_type(vv.target), Instance): + if isinstance(vv, TypeAlias): # Similar to the above TypeInfo case, we allow using # qualified type aliases in runtime context if it refers to an # instance type. For example: # class C: # A = List[int] # x = C.A() <- this is OK - typ = instance_alias_type(vv, mx.named_type) + typ = mx.chk.expr_checker.alias_type_in_runtime_context( + vv, ctx=mx.context, alias_definition=mx.is_lvalue + ) v = Var(name, type=typ) v.info = info @@ -477,6 +516,9 @@ def analyze_member_var_access( return analyze_var(name, v, itype, info, mx, implicit=implicit) elif isinstance(v, FuncDef): assert False, "Did not expect a function" + elif isinstance(v, MypyFile): + mx.chk.module_refs.add(v.fullname) + return mx.chk.expr_checker.module_type(v) elif ( not v and name not in ["__getattr__", "__setattr__", "__getattribute__"] @@ -539,12 +581,15 @@ def analyze_member_var_access( return AnyType(TypeOfAny.special_form) # Could not find the member. + if itype.extra_attrs and name in itype.extra_attrs.attrs: + # For modules use direct symbol table lookup. 
+ if not itype.extra_attrs.mod_name: + return itype.extra_attrs.attrs[name] + if mx.is_super: mx.msg.undefined_in_superclass(name, mx.context) return AnyType(TypeOfAny.from_error) else: - if mx.chk and mx.chk.should_suppress_optional_error([itype]): - return AnyType(TypeOfAny.from_error) return report_missing_attribute(mx.original_type, itype, name, mx) @@ -593,7 +638,7 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: itype=descriptor_type, info=descriptor_type.type, self_type=descriptor_type, - name="__set__", + name="__get__", mx=mx, ) @@ -648,27 +693,12 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: return inferred_dunder_get_type.ret_type -def instance_alias_type(alias: TypeAlias, named_type: Callable[[str], Instance]) -> Type: - """Type of a type alias node targeting an instance, when appears in runtime context. - - As usual, we first erase any unbound type variables to Any. - """ - target: Type = get_proper_type(alias.target) - assert isinstance( - get_proper_type(target), Instance - ), "Must be called only with aliases to classes" - target = get_proper_type(set_any_tvars(alias, alias.line, alias.column)) - assert isinstance(target, Instance) - tp = type_object_type(target.type, named_type) - return expand_type_by_instance(tp, target) - - -def is_instance_var(var: Var, info: TypeInfo) -> bool: +def is_instance_var(var: Var) -> bool: """Return if var is an instance variable according to PEP 526.""" return ( # check the type_info node is the var (not a decorated function, etc.) - var.name in info.names - and info.names[var.name].node is var + var.name in var.info.names + and var.info.names[var.name].node is var and not var.is_classvar # variables without annotations are treated as classvar and not var.is_inferred @@ -703,12 +733,18 @@ def analyze_var( mx.msg.read_only_property(name, itype.type, mx.context) if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) - t = get_proper_type(expand_type_by_instance(typ, itype)) + t = freshen_all_functions_type_vars(typ) + if not (mx.is_self or mx.is_super) or supported_self_type( + get_proper_type(mx.original_type) + ): + t = expand_self_type(var, t, mx.original_type) + t = get_proper_type(expand_type_by_instance(t, itype)) + freeze_all_type_vars(t) result: Type = t typ = get_proper_type(typ) if ( var.is_initialized_in_class - and (not is_instance_var(var, info) or mx.is_operator) + and (not is_instance_var(var) or mx.is_operator) and isinstance(typ, FunctionLike) and not typ.is_type_obj() ): @@ -731,13 +767,16 @@ def analyze_var( # In `x.f`, when checking `x` against A1 we assume x is compatible with A # and similarly for B1 when checking against B dispatched_type = meet.meet_types(mx.original_type, itype) - signature = freshen_function_type_vars(functype) + signature = freshen_all_functions_type_vars(functype) + bound = get_proper_type(expand_self_type(var, signature, mx.original_type)) + assert isinstance(bound, FunctionLike) + signature = bound signature = check_self_arg( signature, dispatched_type, var.is_classmethod, mx.context, name, mx.msg ) signature = bind_self(signature, mx.self_type, var.is_classmethod) - expanded_signature = get_proper_type(expand_type_by_instance(signature, itype)) - freeze_type_vars(expanded_signature) + expanded_signature = expand_type_by_instance(signature, itype) + freeze_all_type_vars(expanded_signature) if var.is_property: # A property cannot have an overloaded type => the cast is fine. 
assert isinstance(expanded_signature, CallableType) @@ -745,7 +784,7 @@ def analyze_var( else: result = expanded_signature else: - if not var.is_ready: + if not var.is_ready and not mx.no_deferral: mx.not_ready_callback(var.name, mx.context) # Implicit 'Any' type. result = AnyType(TypeOfAny.special_form) @@ -760,16 +799,15 @@ def analyze_var( return result -def freeze_type_vars(member_type: Type) -> None: - if not isinstance(member_type, ProperType): - return - if isinstance(member_type, CallableType): - for v in member_type.variables: +def freeze_all_type_vars(member_type: Type) -> None: + member_type.accept(FreezeTypeVarsVisitor()) + + +class FreezeTypeVarsVisitor(TypeTraverserVisitor): + def visit_callable_type(self, t: CallableType) -> None: + for v in t.variables: v.id.meta_level = 0 - if isinstance(member_type, Overloaded): - for it in member_type.items: - for v in it.variables: - v.id.meta_level = 0 + super().visit_callable_type(t) def lookup_member_var_or_accessor(info: TypeInfo, name: str, is_lvalue: bool) -> SymbolNode | None: @@ -859,7 +897,11 @@ def analyze_class_attribute_access( node = info.get(name) if not node: - if info.fallback_to_any: + if itype.extra_attrs and name in itype.extra_attrs.attrs: + # For modules use direct symbol table lookup. + if not itype.extra_attrs.mod_name: + return itype.extra_attrs.attrs[name] + if info.fallback_to_any or info.meta_fallback_to_any: return apply_class_attr_hook(mx, hook, AnyType(TypeOfAny.special_form)) return None @@ -871,6 +913,10 @@ def analyze_class_attribute_access( if isinstance(node.node, TypeInfo): mx.msg.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, mx.context) + # Refuse class attribute access if slot defined + if info.slots and name in info.slots: + mx.msg.fail(message_registry.CLASS_VAR_CONFLICTS_SLOTS.format(name), mx.context) + # If a final attribute was declared on `self` in `__init__`, then it # can't be accessed on the class object. if node.implicit and isinstance(node.node, Var) and node.node.is_final: @@ -922,7 +968,12 @@ def analyze_class_attribute_access( # x: T # C.x # Error, ambiguous access # C[int].x # Also an error, since C[int] is same as C at runtime - if isinstance(t, TypeVarType) or has_type_vars(t): + # Exception is Self type wrapped in ClassVar, that is safe. + def_vars = set(node.node.info.defn.type_vars) + if not node.node.is_classvar and node.node.info.self_type: + def_vars.add(node.node.info.self_type) + typ_vars = set(get_type_vars(t)) + if def_vars & typ_vars: # Exception: access on Type[...], including first argument of class methods is OK. if not isinstance(get_proper_type(mx.original_type), TypeType) or node.implicit: if node.node.is_classvar: @@ -935,7 +986,8 @@ def analyze_class_attribute_access( # In the above example this means that we infer following types: # C.x -> Any # C[int].x -> int - t = erase_typevars(expand_type_by_instance(t, isuper)) + t = get_proper_type(expand_self_type(node.node, t, itype)) + t = erase_typevars(expand_type_by_instance(t, isuper), {tv.id for tv in def_vars}) is_classmethod = (is_decorated and cast(Decorator, node.node).func.is_class) or ( isinstance(node.node, FuncBase) and node.node.is_class @@ -967,10 +1019,10 @@ def analyze_class_attribute_access( # Reference to a module object. 
return mx.named_type("types.ModuleType") - if isinstance(node.node, TypeAlias) and isinstance( - get_proper_type(node.node.target), Instance - ): - return instance_alias_type(node.node, mx.named_type) + if isinstance(node.node, TypeAlias): + return mx.chk.expr_checker.alias_type_in_runtime_context( + node.node, ctx=mx.context, alias_definition=mx.is_lvalue + ) if is_decorated: assert isinstance(node.node, Decorator) @@ -1021,7 +1073,9 @@ def analyze_typeddict_access( if isinstance(mx.context, IndexExpr): # Since we can get this during `a['key'] = ...` # it is safe to assume that the context is `IndexExpr`. - item_type = mx.chk.expr_checker.visit_typeddict_index_expr(typ, mx.context.index) + item_type = mx.chk.expr_checker.visit_typeddict_index_expr( + typ, mx.context.index, setitem=True + ) else: # It can also be `a.__setitem__(...)` direct call. # In this case `item_type` can be `Any`, @@ -1093,11 +1147,11 @@ class B(A[str]): pass if isinstance(t, CallableType): tvars = original_vars if original_vars is not None else [] if is_classmethod: - t = freshen_function_type_vars(t) + t = freshen_all_functions_type_vars(t) t = bind_self(t, original_type, is_classmethod=True) assert isuper is not None t = cast(CallableType, expand_type_by_instance(t, isuper)) - freeze_type_vars(t) + freeze_all_type_vars(t) return t.copy_modified(variables=list(tvars) + list(t.variables)) elif isinstance(t, Overloaded): return Overloaded( @@ -1112,7 +1166,7 @@ class B(A[str]): pass ] ) if isuper is not None: - t = cast(ProperType, expand_type_by_instance(t, isuper)) + t = expand_type_by_instance(t, isuper) return t diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py index b8720d9402f8..603b392eee29 100644 --- a/mypy/checkpattern.py +++ b/mypy/checkpattern.py @@ -33,6 +33,7 @@ coerce_to_literal, make_simplified_union, try_getting_str_literals_from_type, + tuple_fallback, ) from mypy.types import ( AnyType, @@ -256,16 +257,13 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: contracted_inner_types = self.contract_starred_pattern_types( inner_types, star_position, required_patterns ) - can_match = True for p, t in zip(o.patterns, contracted_inner_types): pattern_type = self.accept(p, t) typ, rest, type_map = pattern_type - if is_uninhabited(typ): - can_match = False - else: - contracted_new_inner_types.append(typ) - contracted_rest_inner_types.append(rest) + contracted_new_inner_types.append(typ) + contracted_rest_inner_types.append(rest) self.update_type_map(captures, type_map) + new_inner_types = self.expand_starred_pattern_types( contracted_new_inner_types, star_position, len(inner_types) ) @@ -278,9 +276,7 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: # new_type: Type rest_type: Type = current_type - if not can_match: - new_type = UninhabitedType() - elif isinstance(current_type, TupleType): + if isinstance(current_type, TupleType): narrowed_inner_types = [] inner_rest_types = [] for inner_type, new_inner_type in zip(inner_types, new_inner_types): @@ -325,7 +321,9 @@ def get_sequence_type(self, t: Type) -> Type | None: else: return None - if self.chk.type_is_iterable(t) and isinstance(t, Instance): + if self.chk.type_is_iterable(t) and isinstance(t, (Instance, TupleType)): + if isinstance(t, TupleType): + t = tuple_fallback(t) return self.chk.iterable_item_type(t) else: return None @@ -645,6 +643,9 @@ def construct_sequence_child(self, outer_type: Type, inner_type: Type) -> Type: For example: construct_sequence_child(List[int], str) = List[str] + + TODO: this 
doesn't make sense. For example if one has class S(Sequence[int], Generic[T]) + or class T(Sequence[Tuple[T, T]]), there is no way any of those can map to Sequence[str]. """ proper_type = get_proper_type(outer_type) if isinstance(proper_type, UnionType): @@ -657,6 +658,8 @@ def construct_sequence_child(self, outer_type: Type, inner_type: Type) -> Type: sequence = self.chk.named_generic_type("typing.Sequence", [inner_type]) if is_subtype(outer_type, self.chk.named_type("typing.Sequence")): proper_type = get_proper_type(outer_type) + if isinstance(proper_type, TupleType): + proper_type = tuple_fallback(proper_type) assert isinstance(proper_type, Instance) empty_type = fill_typevars(proper_type.type) partial_type = expand_type_by_instance(empty_type, sequence) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 55cc0fea3720..190782a3bded 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -8,6 +8,8 @@ import sys from io import StringIO +from mypy.errorcodes import error_codes + if sys.version_info >= (3, 11): import tomllib else: @@ -69,6 +71,15 @@ def try_split(v: str | Sequence[str], split_regex: str = "[,]") -> list[str]: return [p.strip() for p in v] +def validate_codes(codes: list[str]) -> list[str]: + invalid_codes = set(codes) - set(error_codes.keys()) + if invalid_codes: + raise argparse.ArgumentTypeError( + f"Invalid error code(s): {', '.join(sorted(invalid_codes))}" + ) + return codes + + def expand_path(path: str) -> str: """Expand the user home directory and any environment variables contained within the provided path. @@ -126,34 +137,42 @@ def check_follow_imports(choice: str) -> str: return choice +def split_commas(value: str) -> list[str]: + # Uses a bit smarter technique to allow last trailing comma + # and to remove last `""` item from the split. + items = value.split(",") + if items and items[-1] == "": + items.pop(-1) + return items + + # For most options, the type of the default value set in options.py is # sufficient, and we don't have to do anything here. This table # exists to specify types for values initialized to None or container # types. 
ini_config_types: Final[dict[str, _INI_PARSER_CALLABLE]] = { "python_version": parse_version, - "strict_optional_whitelist": lambda s: s.split(), "custom_typing_module": str, "custom_typeshed_dir": expand_path, "mypy_path": lambda s: [expand_path(p.strip()) for p in re.split("[,:]", s)], "files": split_and_match_files, "quickstart_file": expand_path, "junit_xml": expand_path, - # These two are for backwards compatibility - "silent_imports": bool, - "almost_silent": bool, "follow_imports": check_follow_imports, "no_site_packages": bool, - "plugins": lambda s: [p.strip() for p in s.split(",")], - "always_true": lambda s: [p.strip() for p in s.split(",")], - "always_false": lambda s: [p.strip() for p in s.split(",")], - "disable_error_code": lambda s: [p.strip() for p in s.split(",")], - "enable_error_code": lambda s: [p.strip() for p in s.split(",")], - "package_root": lambda s: [p.strip() for p in s.split(",")], + "plugins": lambda s: [p.strip() for p in split_commas(s)], + "always_true": lambda s: [p.strip() for p in split_commas(s)], + "always_false": lambda s: [p.strip() for p in split_commas(s)], + "enable_incomplete_feature": lambda s: [p.strip() for p in split_commas(s)], + "disable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "enable_error_code": lambda s: validate_codes([p.strip() for p in split_commas(s)]), + "package_root": lambda s: [p.strip() for p in split_commas(s)], "cache_dir": expand_path, "python_executable": expand_path, "strict": bool, "exclude": lambda s: [s.strip()], + "packages": try_split, + "modules": try_split, } # Reuse the ini_config_types and overwrite the diff @@ -161,17 +180,19 @@ def check_follow_imports(choice: str) -> str: toml_config_types.update( { "python_version": parse_version, - "strict_optional_whitelist": try_split, "mypy_path": lambda s: [expand_path(p) for p in try_split(s, "[,:]")], "files": lambda s: split_and_match_files_list(try_split(s)), "follow_imports": lambda s: check_follow_imports(str(s)), "plugins": try_split, "always_true": try_split, "always_false": try_split, - "disable_error_code": try_split, - "enable_error_code": try_split, + "enable_incomplete_feature": try_split, + "disable_error_code": lambda s: validate_codes(try_split(s)), + "enable_error_code": lambda s: validate_codes(try_split(s)), "package_root": try_split, "exclude": str_or_array_as_list, + "packages": try_split, + "modules": try_split, } ) @@ -263,6 +284,7 @@ def parse_config_file( file=stderr, ) updates = {k: v for k, v in updates.items() if k in PER_MODULE_OPTIONS} + globs = name[5:] for glob in globs.split(","): # For backwards compatibility, replace (back)slashes with dots. 
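A small sketch of the behaviour the new helpers give the config tables above (illustrative only, not part of the patch; KNOWN_CODES is a stand-in for mypy.errorcodes.error_codes):

    import argparse

    KNOWN_CODES = {"attr-defined", "name-defined"}  # stand-in for error_codes.keys()

    def split_commas(value: str) -> list[str]:
        # Mirrors the patch: tolerate a single trailing comma by dropping
        # only the final empty item produced by str.split(",").
        items = value.split(",")
        if items and items[-1] == "":
            items.pop(-1)
        return items

    def validate_codes(codes: list[str]) -> list[str]:
        # Mirrors the patch: reject unknown error codes at config-parse time.
        invalid = set(codes) - KNOWN_CODES
        if invalid:
            raise argparse.ArgumentTypeError(
                f"Invalid error code(s): {', '.join(sorted(invalid))}"
            )
        return codes

    assert split_commas("attr-defined,name-defined,") == ["attr-defined", "name-defined"]
    assert split_commas("a,,b") == ["a", "", "b"]    # only a trailing empty item is dropped
    validate_codes(split_commas("attr-defined,"))    # passes
    # validate_codes(["not-a-code"])                 # would raise ArgumentTypeError
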
@@ -424,6 +446,9 @@ def parse_section( elif key.startswith("disallow") and hasattr(template, key[3:]): options_key = key[3:] invert = True + elif key.startswith("show_") and hasattr(template, "hide_" + key[5:]): + options_key = "hide_" + key[5:] + invert = True elif key == "strict": pass # Special handling below else: @@ -461,26 +486,14 @@ def parse_section( if v: set_strict_flags() continue - if key == "silent_imports": - print( - "%ssilent_imports has been replaced by " - "ignore_missing_imports=True; follow_imports=skip" % prefix, - file=stderr, - ) - if v: - if "ignore_missing_imports" not in results: - results["ignore_missing_imports"] = True - if "follow_imports" not in results: - results["follow_imports"] = "skip" - if key == "almost_silent": - print( - "%salmost_silent has been replaced by " "follow_imports=error" % prefix, - file=stderr, - ) - if v: - if "follow_imports" not in results: - results["follow_imports"] = "error" results[options_key] = v + + # These two flags act as per-module overrides, so store the empty defaults. + if "disable_error_code" not in results: + results["disable_error_code"] = [] + if "enable_error_code" not in results: + results["enable_error_code"] = [] + return results, report_dirs diff --git a/mypy/constant_fold.py b/mypy/constant_fold.py new file mode 100644 index 000000000000..a22c1b9ba9e5 --- /dev/null +++ b/mypy/constant_fold.py @@ -0,0 +1,116 @@ +"""Constant folding of expressions. + +For example, 3 + 5 can be constant folded into 8. +""" + +from __future__ import annotations + +from typing import Union +from typing_extensions import Final + +from mypy.nodes import Expression, FloatExpr, IntExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var + +# All possible result types of constant folding +ConstantValue = Union[int, bool, float, str] +CONST_TYPES: Final = (int, bool, float, str) + + +def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | None: + """Return the constant value of an expression for supported operations. + + Among other things, support int arithmetic and string + concatenation. For example, the expression 3 + 5 has the constant + value 8. + + Also bind simple references to final constants defined in the + current module (cur_mod_id). Binding to references is best effort + -- we don't bind references to other modules. Mypyc trusts these + to be correct in compiled modules, so that it can replace a + constant expression (or a reference to one) with the statically + computed value. We don't want to infer constant values based on + stubs, in particular, as these might not match the implementation + (due to version skew, for example). + + Return None if unsuccessful. 
+ """ + if isinstance(expr, IntExpr): + return expr.value + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, FloatExpr): + return expr.value + elif isinstance(expr, NameExpr): + if expr.name == "True": + return True + elif expr.name == "False": + return False + node = expr.node + if ( + isinstance(node, Var) + and node.is_final + and node.fullname.rsplit(".", 1)[0] == cur_mod_id + ): + value = node.final_value + if isinstance(value, (CONST_TYPES)): + return value + elif isinstance(expr, OpExpr): + left = constant_fold_expr(expr.left, cur_mod_id) + right = constant_fold_expr(expr.right, cur_mod_id) + if isinstance(left, int) and isinstance(right, int): + return constant_fold_binary_int_op(expr.op, left, right) + elif isinstance(left, str) and isinstance(right, str): + return constant_fold_binary_str_op(expr.op, left, right) + elif isinstance(expr, UnaryExpr): + value = constant_fold_expr(expr.expr, cur_mod_id) + if isinstance(value, int): + return constant_fold_unary_int_op(expr.op, value) + return None + + +def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: + if op == "+": + return left + right + if op == "-": + return left - right + elif op == "*": + return left * right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "&": + return left & right + elif op == "|": + return left | right + elif op == "^": + return left ^ right + elif op == "<<": + if right >= 0: + return left << right + elif op == ">>": + if right >= 0: + return left >> right + elif op == "**": + if right >= 0: + ret = left**right + assert isinstance(ret, int) + return ret + return None + + +def constant_fold_unary_int_op(op: str, value: int) -> int | None: + if op == "-": + return -value + elif op == "~": + return ~value + elif op == "+": + return value + return None + + +def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: + if op == "+": + return left + right + return None diff --git a/mypy/constraints.py b/mypy/constraints.py index f9cc68a0a7eb..a8f04094ca63 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -29,11 +29,11 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeOfAny, TypeQuery, TypeType, TypeVarId, + TypeVarLikeType, TypeVarTupleType, TypeVarType, TypeVisitor, @@ -48,11 +48,11 @@ is_named_instance, is_union_with_any, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.typevartuples import ( extract_unpack, find_unpack_in_list, - split_with_instance, + split_with_mapped_and_template, split_with_prefix_and_suffix, ) @@ -73,10 +73,11 @@ class Constraint: op = 0 # SUBTYPE_OF or SUPERTYPE_OF target: Type - def __init__(self, type_var: TypeVarId, op: int, target: Type) -> None: - self.type_var = type_var + def __init__(self, type_var: TypeVarLikeType, op: int, target: Type) -> None: + self.type_var = type_var.id self.op = op self.target = target + self.origin_type_var = type_var def __repr__(self) -> str: op_str = "<:" @@ -108,16 +109,66 @@ def infer_constraints_for_callable( mapper = ArgTypeExpander(context) for i, actuals in enumerate(formal_to_actual): - for actual in actuals: - actual_arg_type = arg_types[actual] - if actual_arg_type is None: - continue + if isinstance(callee.arg_types[i], UnpackType): + unpack_type = callee.arg_types[i] + assert isinstance(unpack_type, UnpackType) + + # In this case we are binding all of the actuals to *args + # and we want a constraint that the typevar tuple being 
unpacked + # is equal to a type list of all the actuals. + actual_types = [] + for actual in actuals: + actual_arg_type = arg_types[actual] + if actual_arg_type is None: + continue - actual_type = mapper.expand_actual_type( - actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] - ) - c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) - constraints.extend(c) + actual_types.append( + mapper.expand_actual_type( + actual_arg_type, + arg_kinds[actual], + callee.arg_names[i], + callee.arg_kinds[i], + ) + ) + + unpacked_type = get_proper_type(unpack_type.type) + if isinstance(unpacked_type, TypeVarTupleType): + constraints.append( + Constraint( + unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types, unpacked_type.tuple_fallback), + ) + ) + elif isinstance(unpacked_type, TupleType): + # Prefixes get converted to positional args, so technically the only case we + # should have here is like Tuple[Unpack[Ts], Y1, Y2, Y3]. If this turns out + # not to hold we can always handle the prefixes too. + inner_unpack = unpacked_type.items[0] + assert isinstance(inner_unpack, UnpackType) + inner_unpacked_type = get_proper_type(inner_unpack.type) + assert isinstance(inner_unpacked_type, TypeVarTupleType) + suffix_len = len(unpacked_type.items) - 1 + constraints.append( + Constraint( + inner_unpacked_type, + SUPERTYPE_OF, + TupleType(actual_types[:-suffix_len], inner_unpacked_type.tuple_fallback), + ) + ) + else: + assert False, "mypy bug: unhandled constraint inference case" + else: + for actual in actuals: + actual_arg_type = arg_types[actual] + if actual_arg_type is None: + continue + + actual_type = mapper.expand_actual_type( + actual_arg_type, arg_kinds[actual], callee.arg_names[i], callee.arg_kinds[i] + ) + c = infer_constraints(callee.arg_types[i], actual_type, SUPERTYPE_OF) + constraints.extend(c) return constraints @@ -146,23 +197,23 @@ def infer_constraints(template: Type, actual: Type, direction: int) -> list[Cons if any( get_proper_type(template) == get_proper_type(t) and get_proper_type(actual) == get_proper_type(a) - for (t, a) in reversed(TypeState.inferring) + for (t, a) in reversed(type_state.inferring) ): return [] - if has_recursive_types(template): + if has_recursive_types(template) or isinstance(get_proper_type(template), Instance): # This case requires special care because it may cause infinite recursion. + # Note that we include Instances because the may be recursive as str(Sequence[str]). if not has_type_vars(template): # Return early on an empty branch. return [] - TypeState.inferring.append((template, actual)) + type_state.inferring.append((template, actual)) res = _infer_constraints(template, actual, direction) - TypeState.inferring.pop() + type_state.inferring.pop() return res return _infer_constraints(template, actual, direction) def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Constraint]: - orig_template = template template = get_proper_type(template) actual = get_proper_type(actual) @@ -190,7 +241,7 @@ def _infer_constraints(template: Type, actual: Type, direction: int) -> list[Con # T :> U2", but they are not equivalent to the constraint solver, # which never introduces new Union types (it uses join() instead). if isinstance(template, TypeVarType): - return [Constraint(template.id, direction, actual)] + return [Constraint(template, direction, actual)] # Now handle the case of either template or actual being a Union. 
# For a Union to be a subtype of another type, every item of the Union @@ -286,7 +337,7 @@ def merge_with_any(constraint: Constraint) -> Constraint: # TODO: if we will support multiple sources Any, use this here instead. any_type = AnyType(TypeOfAny.implementation_artifact) return Constraint( - constraint.type_var, + constraint.origin_type_var, constraint.op, UnionType.make_union([target, any_type], target.line, target.column), ) @@ -345,11 +396,41 @@ def any_constraints(options: list[list[Constraint] | None], eager: bool) -> list merged_option = None merged_options.append(merged_option) return any_constraints(list(merged_options), eager) + + # If normal logic didn't work, try excluding trivially unsatisfiable constraint (due to + # upper bounds) from each option, and comparing them again. + filtered_options = [filter_satisfiable(o) for o in options] + if filtered_options != options: + return any_constraints(filtered_options, eager=eager) + # Otherwise, there are either no valid options or multiple, inconsistent valid # options. Give up and deduce nothing. return [] +def filter_satisfiable(option: list[Constraint] | None) -> list[Constraint] | None: + """Keep only constraints that can possibly be satisfied. + + Currently, we filter out constraints where target is not a subtype of the upper bound. + Since those can be never satisfied. We may add more cases in future if it improves type + inference. + """ + if not option: + return option + satisfiable = [] + for c in option: + if isinstance(c.origin_type_var, TypeVarType) and c.origin_type_var.values: + if any( + mypy.subtypes.is_subtype(c.target, value) for value in c.origin_type_var.values + ): + satisfiable.append(c) + elif mypy.subtypes.is_subtype(c.target, c.origin_type_var.upper_bound): + satisfiable.append(c) + if not satisfiable: + return None + return satisfiable + + def is_same_constraints(x: list[Constraint], y: list[Constraint]) -> bool: for c1 in x: if not any(is_same_constraint(c1, c2) for c2 in y): @@ -484,7 +565,7 @@ def visit_type_var_tuple(self, template: TypeVarTupleType) -> list[Constraint]: raise NotImplementedError def visit_unpack_type(self, template: UnpackType) -> list[Constraint]: - raise NotImplementedError + raise RuntimeError("Mypy bug: unpack should be handled at a higher level.") def visit_parameters(self, template: Parameters) -> list[Constraint]: # constraining Any against C[P] turns into infer_against_any([P], Any) @@ -499,7 +580,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: original_actual = actual = self.actual res: list[Constraint] = [] if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: - if template.type.protocol_members == ["__call__"]: + if "__call__" in template.type.protocol_members: # Special case: a generic callback protocol if not any(template == t for t in template.type.inferring): template.type.inferring.append(template) @@ -511,9 +592,45 @@ def visit_instance(self, template: Instance) -> list[Constraint]: subres = infer_constraints(call, actual, self.direction) res.extend(subres) template.type.inferring.pop() - return res if isinstance(actual, CallableType) and actual.fallback is not None: + if actual.is_type_obj() and template.type.is_protocol: + ret_type = get_proper_type(actual.ret_type) + if isinstance(ret_type, TupleType): + ret_type = mypy.typeops.tuple_fallback(ret_type) + if isinstance(ret_type, Instance): + if self.direction == SUBTYPE_OF: + subtype = template + else: + subtype = ret_type + res.extend( + 
self.infer_constraints_from_protocol_members( + ret_type, template, subtype, template, class_obj=True + ) + ) actual = actual.fallback + if isinstance(actual, TypeType) and template.type.is_protocol: + if isinstance(actual.item, Instance): + if self.direction == SUBTYPE_OF: + subtype = template + else: + subtype = actual.item + res.extend( + self.infer_constraints_from_protocol_members( + actual.item, template, subtype, template, class_obj=True + ) + ) + if self.direction == SUPERTYPE_OF: + # Infer constraints for Type[T] via metaclass of T when it makes sense. + a_item = actual.item + if isinstance(a_item, TypeVarType): + a_item = get_proper_type(a_item.upper_bound) + if isinstance(a_item, Instance) and a_item.type.metaclass_type: + res.extend( + self.infer_constraints_from_protocol_members( + a_item.type.metaclass_type, template, actual, template + ) + ) + if isinstance(actual, Overloaded) and actual.fallback is not None: actual = actual.fallback if isinstance(actual, TypedDictType): @@ -523,15 +640,43 @@ def visit_instance(self, template: Instance) -> list[Constraint]: if isinstance(actual, Instance): instance = actual erased = erase_typevars(template) - assert isinstance(erased, Instance) # type: ignore + assert isinstance(erased, Instance) # type: ignore[misc] # We always try nominal inference if possible, # it is much faster than the structural one. if self.direction == SUBTYPE_OF and template.type.has_base(instance.type.fullname): mapped = map_instance_to_supertype(template, instance.type) tvars = mapped.type.defn.type_vars + + if instance.type.has_type_var_tuple_type: + assert instance.type.type_var_tuple_prefix is not None + assert instance.type.type_var_tuple_suffix is not None + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, instance_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, + instance.args, + instance.type.type_var_tuple_prefix, + instance.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( + tuple(tvars), + instance.type.type_var_tuple_prefix, + instance.type.type_var_tuple_suffix, + ) + tvars = list(tvars_prefix + tvars_suffix) + else: + mapped_args = mapped.args + instance_args = instance.args + # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. - for tvar, mapped_arg, instance_arg in zip(tvars, mapped.args, instance.args): + for tvar, mapped_arg, instance_arg in zip(tvars, mapped_args, instance_args): # TODO(PEP612): More ParamSpec work (or is Parameters the only thing accepted) if isinstance(tvar, TypeVarType): # The constraints for generic type parameters depend on variance. @@ -560,47 +705,34 @@ def visit_instance(self, template: Instance) -> list[Constraint]: suffix.arg_kinds[len(prefix.arg_kinds) :], suffix.arg_names[len(prefix.arg_names) :], ) - res.append(Constraint(mapped_arg.id, SUPERTYPE_OF, suffix)) + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) elif isinstance(suffix, ParamSpecType): - res.append(Constraint(mapped_arg.id, SUPERTYPE_OF, suffix)) - elif isinstance(tvar, TypeVarTupleType): - raise NotImplementedError + res.append(Constraint(mapped_arg, SUPERTYPE_OF, suffix)) + else: + # This case should have been handled above. 
+ assert not isinstance(tvar, TypeVarTupleType) return res elif self.direction == SUPERTYPE_OF and instance.type.has_base(template.type.fullname): mapped = map_instance_to_supertype(instance, template.type) tvars = template.type.defn.type_vars if template.type.has_type_var_tuple_type: - mapped_prefix, mapped_middle, mapped_suffix = split_with_instance(mapped) - template_prefix, template_middle, template_suffix = split_with_instance( - template - ) - - # Add a constraint for the type var tuple, and then - # remove it for the case below. - template_unpack = extract_unpack(template_middle) - if template_unpack is not None: - if isinstance(template_unpack, TypeVarTupleType): - res.append( - Constraint( - template_unpack.id, SUPERTYPE_OF, TypeList(list(mapped_middle)) - ) - ) - elif ( - isinstance(template_unpack, Instance) - and template_unpack.type.fullname == "builtins.tuple" - ): - # TODO: check homogenous tuple case - raise NotImplementedError - elif isinstance(template_unpack, TupleType): - # TODO: check tuple case - raise NotImplementedError - - mapped_args = mapped_prefix + mapped_suffix - template_args = template_prefix + template_suffix - + assert mapped.type.type_var_tuple_prefix is not None + assert mapped.type.type_var_tuple_suffix is not None assert template.type.type_var_tuple_prefix is not None assert template.type.type_var_tuple_suffix is not None + + unpack_constraints, mapped_args, template_args = build_constraints_for_unpack( + mapped.args, + mapped.type.type_var_tuple_prefix, + mapped.type.type_var_tuple_suffix, + template.args, + template.type.type_var_tuple_prefix, + template.type.type_var_tuple_suffix, + self.direction, + ) + res.extend(unpack_constraints) + tvars_prefix, _, tvars_suffix = split_with_prefix_and_suffix( tuple(tvars), template.type.type_var_tuple_prefix, @@ -644,9 +776,12 @@ def visit_instance(self, template: Instance) -> list[Constraint]: suffix.arg_kinds[len(prefix.arg_kinds) :], suffix.arg_names[len(prefix.arg_names) :], ) - res.append(Constraint(template_arg.id, SUPERTYPE_OF, suffix)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) elif isinstance(suffix, ParamSpecType): - res.append(Constraint(template_arg.id, SUPERTYPE_OF, suffix)) + res.append(Constraint(template_arg, SUPERTYPE_OF, suffix)) + else: + # This case should have been handled above. + assert not isinstance(tvar, TypeVarTupleType) return res if ( template.type.is_protocol @@ -661,7 +796,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: # because some type may be considered a subtype of a protocol # due to _promote, but still not implement the protocol. not any(template == t for t in reversed(template.type.inferring)) - and mypy.subtypes.is_protocol_implementation(instance, erased) + and mypy.subtypes.is_protocol_implementation(instance, erased, skip=["__call__"]) ): template.type.inferring.append(template) res.extend( @@ -677,7 +812,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: and # We avoid infinite recursion for structural subtypes also here. 
not any(instance == i for i in reversed(instance.type.inferring)) - and mypy.subtypes.is_protocol_implementation(erased, instance) + and mypy.subtypes.is_protocol_implementation(erased, instance, skip=["__call__"]) ): instance.type.inferring.append(instance) res.extend( @@ -687,6 +822,9 @@ def visit_instance(self, template: Instance) -> list[Constraint]: ) instance.type.inferring.pop() return res + if res: + return res + if isinstance(actual, AnyType): return self.infer_against_any(template.args, actual) if ( @@ -712,7 +850,12 @@ def visit_instance(self, template: Instance) -> list[Constraint]: return [] def infer_constraints_from_protocol_members( - self, instance: Instance, template: Instance, subtype: Type, protocol: Instance + self, + instance: Instance, + template: Instance, + subtype: Type, + protocol: Instance, + class_obj: bool = False, ) -> list[Constraint]: """Infer constraints for situations where either 'template' or 'instance' is a protocol. @@ -722,22 +865,28 @@ def infer_constraints_from_protocol_members( """ res = [] for member in protocol.type.protocol_members: - inst = mypy.subtypes.find_member(member, instance, subtype) + inst = mypy.subtypes.find_member(member, instance, subtype, class_obj=class_obj) temp = mypy.subtypes.find_member(member, template, subtype) if inst is None or temp is None: + if member == "__call__": + continue return [] # See #11020 # The above is safe since at this point we know that 'instance' is a subtype # of (erased) 'template', therefore it defines all protocol members res.extend(infer_constraints(temp, inst, self.direction)) - if mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol.type): + if mypy.subtypes.IS_SETTABLE in mypy.subtypes.get_member_flags(member, protocol): # Settable members are invariant, add opposite constraints res.extend(infer_constraints(temp, inst, neg_op(self.direction))) return res def visit_callable_type(self, template: CallableType) -> list[Constraint]: + # Normalize callables before matching against each other. + # Note that non-normalized callables can be created in annotations + # using e.g. callback protocols. + template = template.with_unpacked_kwargs() if isinstance(self.actual, CallableType): res: list[Constraint] = [] - cactual = self.actual + cactual = self.actual.with_unpacked_kwargs() param_spec = template.param_spec() if param_spec is None: # FIX verify argument counts @@ -746,12 +895,28 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: # We can't infer constraints from arguments if the template is Callable[..., T] # (with literal '...'). if not template.is_ellipsis_args: + if find_unpack_in_list(template.arg_types) is not None: + ( + unpack_constraints, + cactual_args_t, + template_args_t, + ) = find_and_build_constraints_for_unpack( + tuple(cactual.arg_types), tuple(template.arg_types), self.direction + ) + template_args = list(template_args_t) + cactual_args = list(cactual_args_t) + res.extend(unpack_constraints) + assert len(template_args) == len(cactual_args) + else: + template_args = template.arg_types + cactual_args = cactual.arg_types # The lengths should match, but don't crash (it will error elsewhere). - for t, a in zip(template.arg_types, cactual.arg_types): + for t, a in zip(template_args, cactual_args): # Negate direction due to function argument type contravariance. res.extend(infer_constraints(t, a, neg_op(self.direction))) else: # sometimes, it appears we try to get constraints between two paramspec callables? 
+ # TODO: Direction # TODO: check the prefixes match prefix = param_spec.prefix @@ -763,18 +928,18 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: prefix_len = min(prefix_len, max_prefix_len) res.append( Constraint( - param_spec.id, + param_spec, SUBTYPE_OF, cactual.copy_modified( arg_types=cactual.arg_types[prefix_len:], arg_kinds=cactual.arg_kinds[prefix_len:], arg_names=cactual.arg_names[prefix_len:], - ret_type=NoneType(), + ret_type=UninhabitedType(), ), ) ) else: - res.append(Constraint(param_spec.id, SUBTYPE_OF, cactual_ps)) + res.append(Constraint(param_spec, SUBTYPE_OF, cactual_ps)) # compare prefixes cactual_prefix = cactual.copy_modified( @@ -805,7 +970,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: else: res = [ Constraint( - param_spec.id, + param_spec, SUBTYPE_OF, callable_with_ellipsis(any_type, any_type, template.fallback), ) @@ -841,58 +1006,53 @@ def infer_against_overloaded( return infer_constraints(template, item, self.direction) def visit_tuple_type(self, template: TupleType) -> list[Constraint]: + actual = self.actual - # TODO: Support subclasses of Tuple + unpack_index = find_unpack_in_list(template.items) is_varlength_tuple = ( isinstance(actual, Instance) and actual.type.fullname == "builtins.tuple" ) - unpack_index = find_unpack_in_list(template.items) - - if unpack_index is not None: - unpack_item = get_proper_type(template.items[unpack_index]) - assert isinstance(unpack_item, UnpackType) - unpacked_type = get_proper_type(unpack_item.type) - if isinstance(unpacked_type, TypeVarTupleType): + if isinstance(actual, TupleType) or is_varlength_tuple: + res: list[Constraint] = [] + if unpack_index is not None: if is_varlength_tuple: - # This case is only valid when the unpack is the only - # item in the tuple. - # - # TODO: We should support this in the case that all the items - # in the tuple besides the unpack have the same type as the - # varlength tuple's type. E.g. Tuple[int, ...] should be valid - # where we expect Tuple[int, Unpack[Ts]], but not for Tuple[str, Unpack[Ts]]. - assert len(template.items) == 1 - - if isinstance(actual, (TupleType, AnyType)) or is_varlength_tuple: - modified_actual = actual - if isinstance(actual, TupleType): - # Exclude the items from before and after the unpack index. - # TODO: Support including constraints from the prefix/suffix. 
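Editor's note: for context on the prefix slicing in this hunk, this is the user-facing PEP 612 pattern it supports; a hedged sketch (Python 3.10+; `drop_first` and `scale` are invented for illustration):

```python
from __future__ import annotations

from typing import Callable, Concatenate, ParamSpec, TypeVar  # Python 3.10+

P = ParamSpec("P")
R = TypeVar("R")

def drop_first(fn: Callable[Concatenate[int, P], R]) -> Callable[P, R]:
    # Binding P strips the leading int, mirroring the arg_types/arg_kinds/
    # arg_names prefix slicing in the constraint code above.
    def inner(*args: P.args, **kwargs: P.kwargs) -> R:
        return fn(2, *args, **kwargs)
    return inner

def scale(factor: int, value: float) -> float:
    return factor * value

doubled = drop_first(scale)  # P is bound to (value: float)
print(doubled(21.0))         # 42.0
```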
- _, actual_items, _ = split_with_prefix_and_suffix( - tuple(actual.items), - unpack_index, - len(template.items) - unpack_index - 1, - ) - modified_actual = actual.copy_modified(items=list(actual_items)) - return [ - Constraint( - type_var=unpacked_type.id, op=self.direction, target=modified_actual - ) - ] + unpack_type = template.items[unpack_index] + assert isinstance(unpack_type, UnpackType) + unpacked_type = unpack_type.type + assert isinstance(unpacked_type, TypeVarTupleType) + return [Constraint(type_var=unpacked_type, op=self.direction, target=actual)] + else: + assert isinstance(actual, TupleType) + ( + unpack_constraints, + actual_items, + template_items, + ) = find_and_build_constraints_for_unpack( + tuple(actual.items), tuple(template.items), self.direction + ) + res.extend(unpack_constraints) + elif isinstance(actual, TupleType): + actual_items = tuple(actual.items) + template_items = tuple(template.items) + else: + return res - if isinstance(actual, TupleType) and len(actual.items) == len(template.items): - if ( - actual.partial_fallback.type.is_named_tuple - and template.partial_fallback.type.is_named_tuple - ): - # For named tuples using just the fallbacks usually gives better results. - return infer_constraints( - template.partial_fallback, actual.partial_fallback, self.direction - ) - res: list[Constraint] = [] - for i in range(len(template.items)): - res.extend(infer_constraints(template.items[i], actual.items[i], self.direction)) + # Cases above will return if actual wasn't a TupleType. + assert isinstance(actual, TupleType) + if len(actual_items) == len(template_items): + if ( + actual.partial_fallback.type.is_named_tuple + and template.partial_fallback.type.is_named_tuple + ): + # For named tuples using just the fallbacks usually gives better results. + return res + infer_constraints( + template.partial_fallback, actual.partial_fallback, self.direction + ) + for i in range(len(template_items)): + res.extend( + infer_constraints(template_items[i], actual_items[i], self.direction) + ) return res elif isinstance(actual, AnyType): return self.infer_against_any(template.items, actual) @@ -925,10 +1085,13 @@ def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]: def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> list[Constraint]: res: list[Constraint] = [] for t in types: - # Note that we ignore variance and simply always use the - # original direction. This is because for Any targets direction is - # irrelevant in most cases, see e.g. is_same_constraint(). - res.extend(infer_constraints(t, any_type, self.direction)) + if isinstance(t, UnpackType) and isinstance(t.type, TypeVarTupleType): + res.append(Constraint(t.type, self.direction, any_type)) + else: + # Note that we ignore variance and simply always use the + # original direction. This is because for Any targets direction is + # irrelevant in most cases, see e.g. is_same_constraint(). + res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> list[Constraint]: @@ -998,3 +1161,85 @@ def find_matching_overload_items( # it maintains backward compatibility. 
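Editor's note: a call-site illustration (not from the diff; assumes Python 3.11+ or typing_extensions) of the tuple splitting that `visit_tuple_type` performs:

```python
from __future__ import annotations

from typing import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")

def describe_middle(t: tuple[int, Unpack[Ts], str]) -> None:
    # Ts is bound to whatever sits between the int prefix and the str suffix.
    print(t[1:-1])

# The solver splits (int, bytes, float, str) into prefix int, suffix str, and
# binds Ts to (bytes, float).
describe_middle((1, b"raw", 2.5, "end"))  # (b'raw', 2.5)
```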
res = items[:] return res + + +def find_and_build_constraints_for_unpack( + mapped: tuple[Type, ...], template: tuple[Type, ...], direction: int +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + mapped_prefix_len = find_unpack_in_list(mapped) + if mapped_prefix_len is not None: + mapped_suffix_len: int | None = len(mapped) - mapped_prefix_len - 1 + else: + mapped_suffix_len = None + + template_prefix_len = find_unpack_in_list(template) + assert template_prefix_len is not None + template_suffix_len = len(template) - template_prefix_len - 1 + + return build_constraints_for_unpack( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + direction, + ) + + +def build_constraints_for_unpack( + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, + direction: int, +) -> tuple[list[Constraint], tuple[Type, ...], tuple[Type, ...]]: + if mapped_prefix_len is None: + mapped_prefix_len = template_prefix_len + if mapped_suffix_len is None: + mapped_suffix_len = template_suffix_len + + split_result = split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) + assert split_result is not None + ( + mapped_prefix, + mapped_middle, + mapped_suffix, + template_prefix, + template_middle, + template_suffix, + ) = split_result + + template_unpack = extract_unpack(template_middle) + res = [] + + if template_unpack is not None: + if isinstance(template_unpack, TypeVarTupleType): + res.append( + Constraint( + template_unpack, + direction, + TupleType(list(mapped_middle), template_unpack.tuple_fallback), + ) + ) + elif ( + isinstance(template_unpack, Instance) + and template_unpack.type.fullname == "builtins.tuple" + ): + for item in mapped_middle: + res.extend(infer_constraints(template_unpack.args[0], item, direction)) + + elif isinstance(template_unpack, TupleType): + if len(template_unpack.items) == len(mapped_middle): + for template_arg, item in zip(template_unpack.items, mapped_middle): + res.extend(infer_constraints(template_arg, item, direction)) + return (res, mapped_prefix + mapped_suffix, template_prefix + template_suffix) diff --git a/mypy/copytype.py b/mypy/copytype.py index baa1ba34cbac..6024e527705b 100644 --- a/mypy/copytype.py +++ b/mypy/copytype.py @@ -94,7 +94,7 @@ def visit_parameters(self, t: Parameters) -> ProperType: return self.copy_common(t, dup) def visit_type_var_tuple(self, t: TypeVarTupleType) -> ProperType: - dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound) + dup = TypeVarTupleType(t.name, t.fullname, t.id, t.upper_bound, t.tuple_fallback) return self.copy_common(t, dup) def visit_unpack_type(self, t: UnpackType) -> ProperType: diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py index 8a4027aa8262..efa1b5f01288 100644 --- a/mypy/dmypy/client.py +++ b/mypy/dmypy/client.py @@ -19,7 +19,7 @@ from mypy.dmypy_os import alive, kill from mypy.dmypy_util import DEFAULT_STATUS_FILE, receive from mypy.ipc import IPCClient, IPCException -from mypy.util import check_python_version, get_terminal_width +from mypy.util import check_python_version, get_terminal_width, should_force_color from mypy.version import __version__ # Argument parser. 
Subparsers are tied to action functions by the @@ -653,7 +653,7 @@ def request( args["command"] = command # Tell the server whether this request was initiated from a human-facing terminal, # so that it can format the type checking output accordingly. - args["is_tty"] = sys.stdout.isatty() or int(os.getenv("MYPY_FORCE_COLOR", "0")) > 0 + args["is_tty"] = sys.stdout.isatty() or should_force_color() args["terminal_width"] = get_terminal_width() bdata = json.dumps(args).encode("utf8") _, name = get_status(status_file) @@ -665,6 +665,10 @@ def request( return {"error": str(err)} # TODO: Other errors, e.g. ValueError, UnicodeError else: + # Display debugging output written to stdout in the server process for convenience. + stdout = response.get("stdout") + if stdout: + sys.stdout.write(stdout) return response diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 4b12bcbe9a29..7227cd559946 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -197,7 +197,7 @@ def __init__(self, options: Options, status_file: str, timeout: int | None = Non # Since the object is created in the parent process we can check # the output terminal options here. - self.formatter = FancyFormatter(sys.stdout, sys.stderr, options.show_error_codes) + self.formatter = FancyFormatter(sys.stdout, sys.stderr, options.hide_error_codes) def _response_metadata(self) -> dict[str, str]: py_version = f"{self.options.python_version[0]}_{self.options.python_version[1]}" @@ -214,6 +214,8 @@ def serve(self) -> None: while True: with server: data = receive(server) + debug_stdout = io.StringIO() + sys.stdout = debug_stdout resp: dict[str, Any] = {} if "command" not in data: resp = {"error": "No command found in request"} @@ -230,8 +232,10 @@ def serve(self) -> None: tb = traceback.format_exception(*sys.exc_info()) resp = {"error": "Daemon crashed!\n" + "".join(tb)} resp.update(self._response_metadata()) + resp["stdout"] = debug_stdout.getvalue() server.write(json.dumps(resp).encode("utf8")) raise + resp["stdout"] = debug_stdout.getvalue() try: resp.update(self._response_metadata()) server.write(json.dumps(resp).encode("utf8")) @@ -267,7 +271,9 @@ def run_command(self, command: str, data: dict[str, object]) -> dict[str, object # Only the above commands use some error formatting. del data["is_tty"] del data["terminal_width"] - return method(self, **data) + ret = method(self, **data) + assert isinstance(ret, dict) + return ret # Command functions (run in the server via RPC). @@ -506,7 +512,8 @@ def initialize_fine_grained( print_memory_profile(run_gc=False) - status = 1 if messages else 0 + __, n_notes, __ = count_stats(messages) + status = 1 if messages and n_notes < len(messages) else 0 messages = self.pretty_messages(messages, len(sources), is_tty, terminal_width) return {"out": "".join(s + "\n" for s in messages), "err": "", "status": status} @@ -586,7 +593,7 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources.extend(new_files) # Process changes directly reachable from roots. - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) # Follow deps from changed modules (still within graph). 
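Editor's note: the client/server changes above forward server-side stdout to the client. A self-contained sketch of that idea (it uses `contextlib.redirect_stdout` instead of the direct `sys.stdout` swap in the diff):

```python
# Standalone sketch of the stdout passthrough (not the dmypy code): capture
# anything the handler prints and return it in the JSON response so the
# client can display it.
import contextlib
import io
import json

def handle(command: str) -> dict[str, object]:
    print(f"debug: handling {command!r}")  # would otherwise vanish in a daemon
    return {"status": 0}

def serve_one(command: str) -> str:
    buf = io.StringIO()
    with contextlib.redirect_stdout(buf):
        resp = handle(command)
    resp["stdout"] = buf.getvalue()
    return json.dumps(resp)

print(serve_one("check"))  # {"status": 0, "stdout": "debug: handling 'check'\n"}
```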
worklist = changed[:] @@ -603,13 +610,13 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l sources2, graph, seen, changed_paths ) self.update_sources(new_files) - messages = fine_grained_manager.update(changed, []) + messages = fine_grained_manager.update(changed, [], followed=True) worklist.extend(changed) t2 = time.time() def refresh_file(module: str, path: str) -> list[str]: - return fine_grained_manager.update([(module, path)], []) + return fine_grained_manager.update([(module, path)], [], followed=True) for module_id, state in list(graph.items()): new_messages = refresh_suppressed_submodules( @@ -626,10 +633,10 @@ def refresh_file(module: str, path: str) -> list[str]: new_unsuppressed = self.find_added_suppressed(graph, seen, manager.search_paths) if not new_unsuppressed: break - new_files = [BuildSource(mod[1], mod[0]) for mod in new_unsuppressed] + new_files = [BuildSource(mod[1], mod[0], followed=True) for mod in new_unsuppressed] sources.extend(new_files) self.update_sources(new_files) - messages = fine_grained_manager.update(new_unsuppressed, []) + messages = fine_grained_manager.update(new_unsuppressed, [], followed=True) for module_id, path in new_unsuppressed: new_messages = refresh_suppressed_submodules( @@ -711,7 +718,7 @@ def find_reachable_changed_modules( for dep in state.dependencies: if dep not in seen: seen.add(dep) - worklist.append(BuildSource(graph[dep].path, graph[dep].id)) + worklist.append(BuildSource(graph[dep].path, graph[dep].id, followed=True)) return changed, new_files def direct_imports( @@ -719,7 +726,7 @@ def direct_imports( ) -> list[BuildSource]: """Return the direct imports of module not included in seen.""" state = graph[module[0]] - return [BuildSource(graph[dep].path, dep) for dep in state.dependencies] + return [BuildSource(graph[dep].path, dep, followed=True) for dep in state.dependencies] def find_added_suppressed( self, graph: mypy.build.Graph, seen: set[str], search_paths: SearchPaths @@ -942,7 +949,7 @@ def cmd_hang(self) -> dict[str, object]: def get_meminfo() -> dict[str, Any]: res: dict[str, Any] = {} try: - import psutil # type: ignore # It's not in typeshed yet + import psutil except ImportError: res["memory_psutil_missing"] = ( "psutil not found, run pip install mypy[dmypy] " diff --git a/mypy/erasetype.py b/mypy/erasetype.py index 89c07186f44a..6533d0c4e0f9 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -176,8 +176,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Type alias target can't contain bound type variables, so - # it is safe to just erase the arguments. + # Type alias target can't contain bound type variables (not bound by the type + # alias itself), so it is safe to just erase the arguments. 
return t.copy_modified(args=[a.accept(self) for a in t.args]) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 955b30f915b5..3d8b1096ed4f 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -5,19 +5,30 @@ from __future__ import annotations +from collections import defaultdict from typing_extensions import Final error_codes: dict[str, ErrorCode] = {} +sub_code_map: dict[str, set[str]] = defaultdict(set) class ErrorCode: def __init__( - self, code: str, description: str, category: str, default_enabled: bool = True + self, + code: str, + description: str, + category: str, + default_enabled: bool = True, + sub_code_of: ErrorCode | None = None, ) -> None: self.code = code self.description = description self.category = category self.default_enabled = default_enabled + self.sub_code_of = sub_code_of + if sub_code_of is not None: + assert sub_code_of.sub_code_of is None, "Nested subcategories are not supported" + sub_code_map[sub_code_of.code].add(code) error_codes[code] = self def __str__(self) -> str: @@ -33,7 +44,9 @@ def __str__(self) -> str: CALL_OVERLOAD: Final = ErrorCode( "call-overload", "Check that an overload variant matches arguments", "General" ) -VALID_TYPE: Final = ErrorCode("valid-type", "Check that type (annotation) is valid", "General") +VALID_TYPE: Final[ErrorCode] = ErrorCode( + "valid-type", "Check that type (annotation) is valid", "General" +) VAR_ANNOTATED: Final = ErrorCode( "var-annotated", "Require variable annotation if type can't be inferred", "General" ) @@ -46,9 +59,15 @@ def __str__(self) -> str: RETURN_VALUE: Final[ErrorCode] = ErrorCode( "return-value", "Check that return value is compatible with signature", "General" ) -ASSIGNMENT: Final = ErrorCode( +ASSIGNMENT: Final[ErrorCode] = ErrorCode( "assignment", "Check that assigned value is compatible with target", "General" ) +METHOD_ASSIGN: Final[ErrorCode] = ErrorCode( + "method-assign", + "Check that assignment target is not a method", + "General", + sub_code_of=ASSIGNMENT, +) TYPE_ARG: Final = ErrorCode("type-arg", "Check that generic type arguments are present", "General") TYPE_VAR: Final = ErrorCode("type-var", "Check that type variable values are valid", "General") UNION_ATTR: Final = ErrorCode( @@ -65,6 +84,12 @@ def __str__(self) -> str: TYPEDDICT_ITEM: Final = ErrorCode( "typeddict-item", "Check items when constructing TypedDict", "General" ) +TYPEDDICT_UNKNOWN_KEY: Final = ErrorCode( + "typeddict-unknown-key", + "Check unknown keys when constructing TypedDict", + "General", + sub_code_of=TYPEDDICT_ITEM, +) HAS_TYPE: Final = ErrorCode( "has-type", "Check that type of reference can be determined", "General" ) @@ -78,6 +103,9 @@ def __str__(self) -> str: ABSTRACT: Final = ErrorCode( "abstract", "Prevent instantiation of classes with abstract attributes", "General" ) +TYPE_ABSTRACT: Final = ErrorCode( + "type-abstract", "Require only concrete classes where Type[...] is expected", "General" +) VALID_NEWTYPE: Final = ErrorCode( "valid-newtype", "Check that argument 2 to NewType is valid", "General" ) @@ -94,6 +122,16 @@ def __str__(self) -> str: UNUSED_COROUTINE: Final = ErrorCode( "unused-coroutine", "Ensure that all coroutines are used", "General" ) +# TODO: why do we need the explicit type here? 
Without it mypyc CI builds fail with +# mypy/message_registry.py:37: error: Cannot determine type of "EMPTY_BODY" [has-type] +EMPTY_BODY: Final[ErrorCode] = ErrorCode( + "empty-body", + "A dedicated error code to opt out return errors for empty/trivial bodies", + "General", +) +SAFE_SUPER: Final = ErrorCode( + "safe-super", "Warn about calls to abstract methods with empty/trivial bodies", "General" +) # These error codes aren't enabled by default. NO_UNTYPED_DEF: Final[ErrorCode] = ErrorCode( @@ -122,6 +160,15 @@ def __str__(self) -> str: UNREACHABLE: Final = ErrorCode( "unreachable", "Warn about unreachable statements or expressions", "General" ) +ANNOTATION_UNCHECKED = ErrorCode( + "annotation-unchecked", "Notify about type annotations in unchecked functions", "General" +) +POSSIBLY_UNDEFINED: Final[ErrorCode] = ErrorCode( + "possibly-undefined", + "Warn about variables that are defined only in some execution paths", + "General", + default_enabled=False, +) REDUNDANT_EXPR: Final = ErrorCode( "redundant-expr", "Warn about redundant expressions", "General", default_enabled=False ) @@ -131,6 +178,17 @@ def __str__(self) -> str: "General", default_enabled=False, ) +TRUTHY_FUNCTION: Final[ErrorCode] = ErrorCode( + "truthy-function", + "Warn about function that always evaluate to true in boolean contexts", + "General", +) +TRUTHY_ITERABLE: Final[ErrorCode] = ErrorCode( + "truthy-iterable", + "Warn about Iterable expressions that could always evaluate to true in boolean contexts", + "General", + default_enabled=False, +) NAME_MATCH: Final = ErrorCode( "name-match", "Check that type definition has consistent naming", "General" ) @@ -151,6 +209,15 @@ def __str__(self) -> str: "General", default_enabled=False, ) +REDUNDANT_SELF_TYPE = ErrorCode( + "redundant-self", + "Warn about redundant Self type annotations on method first argument", + "General", + default_enabled=False, +) +USED_BEFORE_DEF: Final[ErrorCode] = ErrorCode( + "used-before-def", "Warn about variables that are used before they are defined", "General" +) # Syntax errors are often blocking. diff --git a/mypy/errors.py b/mypy/errors.py index 7aa40a235c1e..2c2c1e5ca227 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -4,7 +4,7 @@ import sys import traceback from collections import defaultdict -from typing import Callable, NoReturn, Optional, TextIO, Tuple, TypeVar +from typing import Callable, Iterable, NoReturn, Optional, TextIO, Tuple, TypeVar from typing_extensions import Final, Literal, TypeAlias as _TypeAlias from mypy import errorcodes as codes @@ -17,11 +17,14 @@ T = TypeVar("T") +# Show error codes for some note-level messages (these usually appear alone +# and not as a comment for a previous error-level message). +SHOW_NOTE_CODES: Final = {codes.ANNOTATION_UNCHECKED} allowed_duplicates: Final = ["@overload", "Got:", "Expected:"] # Keep track of the original error code when the error code of a message is changed. # This is used to give notes about out-of-date "type: ignore" comments. -original_error_codes: Final = {codes.LITERAL_REQ: codes.MISC} +original_error_codes: Final = {codes.LITERAL_REQ: codes.MISC, codes.TYPE_ABSTRACT: codes.MISC} class ErrorInfo: @@ -75,7 +78,7 @@ class ErrorInfo: # Actual origin of the error message as tuple (path, line number, end line number) # If end line number is unknown, use line number. 
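Editor's note: the `sub_code_of` machinery introduced in errorcodes.py above can be pictured with a tiny standalone registry (invented names, not mypy's `ErrorCode` class): an ignore for the parent code also silences its sub-codes.

```python
from __future__ import annotations

from collections import defaultdict

sub_code_map: dict[str, set[str]] = defaultdict(set)

def register(code: str, sub_code_of: str | None = None) -> str:
    if sub_code_of is not None:
        sub_code_map[sub_code_of].add(code)
    return code

ASSIGNMENT = register("assignment")
METHOD_ASSIGN = register("method-assign", sub_code_of=ASSIGNMENT)

def is_ignored(code: str, ignored: set[str]) -> bool:
    if code in ignored:
        return True
    # An ignore for the parent covers its sub-codes, mirroring is_ignored_error.
    return any(code in subs for parent, subs in sub_code_map.items() if parent in ignored)

print(is_ignored(METHOD_ASSIGN, {"assignment"}))  # True
print(is_ignored(ASSIGNMENT, {"method-assign"}))  # False
```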
- origin: tuple[str, int, int] + origin: tuple[str, Iterable[int]] # Fine-grained incremental target where this was reported target: str | None = None @@ -101,7 +104,7 @@ def __init__( blocker: bool, only_once: bool, allow_dups: bool, - origin: tuple[str, int, int] | None = None, + origin: tuple[str, Iterable[int]] | None = None, target: str | None = None, ) -> None: self.import_ctx = import_ctx @@ -119,7 +122,7 @@ def __init__( self.blocker = blocker self.only_once = only_once self.allow_dups = allow_dups - self.origin = origin or (file, line, line) + self.origin = origin or (file, [line]) self.target = target @@ -257,18 +260,17 @@ def __init__( self, show_error_context: bool = False, show_column_numbers: bool = False, - show_error_codes: bool = False, + hide_error_codes: bool = False, pretty: bool = False, show_error_end: bool = False, read_source: Callable[[str], list[str] | None] | None = None, show_absolute_path: bool = False, - enabled_error_codes: set[ErrorCode] | None = None, - disabled_error_codes: set[ErrorCode] | None = None, many_errors_threshold: int = -1, + options: Options | None = None, ) -> None: self.show_error_context = show_error_context self.show_column_numbers = show_column_numbers - self.show_error_codes = show_error_codes + self.hide_error_codes = hide_error_codes self.show_absolute_path = show_absolute_path self.pretty = pretty self.show_error_end = show_error_end @@ -276,9 +278,8 @@ def __init__( assert show_column_numbers, "Inconsistent formatting, must be prevented by argparse" # We use fscache to read source code when showing snippets. self.read_source = read_source - self.enabled_error_codes = enabled_error_codes or set() - self.disabled_error_codes = disabled_error_codes or set() self.many_errors_threshold = many_errors_threshold + self.options = options self.initialize() def initialize(self) -> None: @@ -313,7 +314,9 @@ def simplify_path(self, file: str) -> str: file = os.path.normpath(file) return remove_path_prefix(file, self.ignore_prefix) - def set_file(self, file: str, module: str | None, scope: Scope | None = None) -> None: + def set_file( + self, file: str, module: str | None, options: Options, scope: Scope | None = None + ) -> None: """Set the path and module id of the current file.""" # The path will be simplified later, in render_messages. 
That way # * 'file' is always a key that uniquely identifies a source file @@ -324,6 +327,7 @@ def set_file(self, file: str, module: str | None, scope: Scope | None = None) -> self.file = file self.target_module = module self.scope = scope + self.options = options def set_file_ignored_lines( self, file: str, ignored_lines: dict[int, list[str]], ignore_all: bool = False @@ -363,7 +367,7 @@ def report( file: str | None = None, only_once: bool = False, allow_dups: bool = False, - origin_span: tuple[int, int] | None = None, + origin_span: Iterable[int] | None = None, offset: int = 0, end_line: int | None = None, end_column: int | None = None, @@ -407,7 +411,7 @@ def report( message = " " * offset + message if origin_span is None: - origin_span = (line, line) + origin_span = [line] if end_line is None: end_line = line @@ -430,7 +434,7 @@ def report( blocker, only_once, allow_dups, - origin=(self.file, *origin_span), + origin=(self.file, origin_span), target=self.current_target(), ) self.add_error_info(info) @@ -463,7 +467,7 @@ def _filter_error(self, file: str, info: ErrorInfo) -> bool: return False def add_error_info(self, info: ErrorInfo) -> None: - file, line, end_line = info.origin + file, lines = info.origin # process the stack of ErrorWatchers before modifying any internal state # in case we need to filter out the error entirely # NB: we need to do this both here and in _add_error_info, otherwise we @@ -474,7 +478,7 @@ def add_error_info(self, info: ErrorInfo) -> None: if file in self.ignored_lines: # Check each line in this context for "type: ignore" comments. # line == end_line for most nodes, so we only loop once. - for scope_line in range(line, end_line + 1): + for scope_line in lines: if self.is_ignored_error(scope_line, info, self.ignored_lines[file]): # Annotation requests us to ignore all errors on this line. 
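Editor's note: to make the origin change above concrete, a minimal sketch (hypothetical helper, not mypy's API) of how a span of candidate lines interacts with ignored lines:

```python
# An error now carries an iterable of candidate lines; an ignore on any of
# them silences it.
from typing import Iterable

def is_silenced(origin_lines: Iterable[int], ignored_lines: set[int]) -> bool:
    return any(line in ignored_lines for line in origin_lines)

print(is_silenced([3, 4, 5], {4}))  # True: "type: ignore" on line 4 covers the span
print(is_silenced([3], {4}))        # False
```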
self.used_ignored_lines[file][scope_line].append( @@ -582,14 +586,27 @@ def is_ignored_error(self, line: int, info: ErrorInfo, ignores: dict[int, list[s # Empty list means that we ignore all errors return True if info.code and self.is_error_code_enabled(info.code): - return info.code.code in ignores[line] + return ( + info.code.code in ignores[line] + or info.code.sub_code_of is not None + and info.code.sub_code_of.code in ignores[line] + ) return False def is_error_code_enabled(self, error_code: ErrorCode) -> bool: - if error_code in self.disabled_error_codes: + if self.options: + current_mod_disabled = self.options.disabled_error_codes + current_mod_enabled = self.options.enabled_error_codes + else: + current_mod_disabled = set() + current_mod_enabled = set() + + if error_code in current_mod_disabled: return False - elif error_code in self.enabled_error_codes: + elif error_code in current_mod_enabled: return True + elif error_code.sub_code_of is not None and error_code.sub_code_of in current_mod_disabled: + return False else: return error_code.default_enabled @@ -609,7 +626,10 @@ def clear_errors_in_targets(self, path: str, targets: set[str]) -> None: self.has_blockers.remove(path) def generate_unused_ignore_errors(self, file: str) -> None: - if is_typeshed_file(file) or file in self.ignored_files: + if ( + is_typeshed_file(self.options.abs_custom_typeshed_dir if self.options else None, file) + or file in self.ignored_files + ): return ignored_lines = self.ignored_lines[file] used_ignored_lines = self.used_ignored_lines[file] @@ -627,6 +647,10 @@ def generate_unused_ignore_errors(self, file: str) -> None: if len(ignored_codes) > 1 and len(unused_ignored_codes) > 0: unused_codes_message = f"[{', '.join(sorted(unused_ignored_codes))}]" message = f'Unused "type: ignore{unused_codes_message}" comment' + for unused in unused_ignored_codes: + narrower = set(used_ignored_codes) & codes.sub_code_map[unused] + if narrower: + message += f", use narrower [{', '.join(narrower)}] instead of [{unused}]" # Don't use report since add_error_info will ignore the error! info = ErrorInfo( self.import_context(), @@ -650,7 +674,10 @@ def generate_unused_ignore_errors(self, file: str) -> None: def generate_ignore_without_code_errors( self, file: str, is_warning_unused_ignores: bool ) -> None: - if is_typeshed_file(file) or file in self.ignored_files: + if ( + is_typeshed_file(self.options.abs_custom_typeshed_dir if self.options else None, file) + or file in self.ignored_files + ): return used_ignored_lines = self.used_ignored_lines[file] @@ -720,6 +747,24 @@ def is_errors_for_file(self, file: str) -> bool: """Are there any errors for the given file?""" return file in self.error_info_map + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + Return True if errors are not shown to user, i.e. errors are ignored + or they are collected for internal use only. + + If True, we should prefer to generate a simple message quickly. + All normal errors should still be reported. + """ + if self.file in self.ignored_files: + # Errors ignored, so no point generating fancy messages + return True + for _watcher in self._watchers: + if _watcher._filter is True and _watcher._filtered is None: + # Errors are filtered + return True + return False + def raise_error(self, use_stdout: bool = True) -> NoReturn: """Raise a CompileError with the generated messages. 
@@ -768,7 +813,11 @@ def format_messages( s = f"{srcloc}: {severity}: {message}" else: s = message - if self.show_error_codes and code and severity != "note": + if ( + not self.hide_error_codes + and code + and (severity != "note" or code in SHOW_NOTE_CODES) + ): # If note has an error code, it is related to a previous error. Avoid # displaying duplicate error codes. s = f"{s} [{code.code}]" diff --git a/mypy/evalexpr.py b/mypy/evalexpr.py new file mode 100644 index 000000000000..2bc6966fa2fa --- /dev/null +++ b/mypy/evalexpr.py @@ -0,0 +1,204 @@ +""" + +Evaluate an expression. + +Used by stubtest; in a separate file because things break if we don't +put it in a mypyc-compiled file. + +""" +import ast +from typing_extensions import Final + +import mypy.nodes +from mypy.visitor import ExpressionVisitor + +UNKNOWN = object() + + +class _NodeEvaluator(ExpressionVisitor[object]): + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> int: + return o.value + + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: + return o.value + + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> object: + # The value of a BytesExpr is a string created from the repr() + # of the bytes object. Get the original bytes back. + try: + return ast.literal_eval(f"b'{o.value}'") + except SyntaxError: + return ast.literal_eval(f'b"{o.value}"') + + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> float: + return o.value + + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> object: + return o.value + + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> object: + return Ellipsis + + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> object: + return UNKNOWN + + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> object: + if o.name == "True": + return True + elif o.name == "False": + return False + elif o.name == "None": + return None + # TODO: Handle more names by figuring out a way to hook into the + # symbol table. 
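Editor's note: the new mypy/evalexpr.py visitor folds literal expressions and falls back to an UNKNOWN sentinel. A standalone analogue built on the stdlib `ast` module (not mypy's AST classes) shows the same shape:

```python
# Fold literal expressions, return a sentinel otherwise.
import ast

UNKNOWN = object()

def evaluate(source: str) -> object:
    node = ast.parse(source, mode="eval").body
    try:
        return ast.literal_eval(node)  # ints, strings, bytes, tuples, dicts, ...
    except ValueError:
        return UNKNOWN                 # names, calls, comprehensions, ...

print(evaluate("(-1, 'x', b'raw')"))     # (-1, 'x', b'raw')
print(evaluate("open('f')") is UNKNOWN)  # True
```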
+ return UNKNOWN + + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> object: + return UNKNOWN + + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> object: + return UNKNOWN + + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> object: + return UNKNOWN + + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> object: + return UNKNOWN + + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> object: + return UNKNOWN + + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> object: + return UNKNOWN + + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> object: + return o.expr.accept(self) + + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> object: + return o.expr.accept(self) + + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> object: + return UNKNOWN + + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> object: + return UNKNOWN + + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> object: + operand = o.expr.accept(self) + if operand is UNKNOWN: + return UNKNOWN + if o.op == "-": + if isinstance(operand, (int, float, complex)): + return -operand + elif o.op == "+": + if isinstance(operand, (int, float, complex)): + return +operand + elif o.op == "~": + if isinstance(operand, int): + return ~operand + elif o.op == "not": + if isinstance(operand, (bool, int, float, str, bytes)): + return not operand + return UNKNOWN + + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> object: + return o.value.accept(self) + + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return items + return UNKNOWN + + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> object: + items = [ + (UNKNOWN if key is None else key.accept(self), value.accept(self)) + for key, value in o.items + ] + if all(key is not UNKNOWN and value is not None for key, value in items): + return dict(items) + return UNKNOWN + + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return tuple(items) + return UNKNOWN + + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> object: + items = [item.accept(self) for item in o.items] + if all(item is not UNKNOWN for item in items): + return set(items) + return UNKNOWN + + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> object: + return UNKNOWN + + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> object: + return UNKNOWN + + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> object: + return UNKNOWN + + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> object: + return UNKNOWN + + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> object: + return UNKNOWN + + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> object: + return UNKNOWN + + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> object: + return UNKNOWN + + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> object: + return UNKNOWN + + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> object: + return UNKNOWN + + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> object: + return UNKNOWN + + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> object: + return UNKNOWN + + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> object: + return UNKNOWN + + def 
visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> object: + return UNKNOWN + + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> object: + return UNKNOWN + + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> object: + return UNKNOWN + + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> object: + return UNKNOWN + + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> object: + return UNKNOWN + + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> object: + return UNKNOWN + + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> object: + return UNKNOWN + + def visit_temp_node(self, o: mypy.nodes.TempNode) -> object: + return UNKNOWN + + +_evaluator: Final = _NodeEvaluator() + + +def evaluate_expression(expr: mypy.nodes.Expression) -> object: + """Evaluate an expression at runtime. + + Return the result of the expression, or UNKNOWN if the expression cannot be + evaluated. + """ + return expr.accept(_evaluator) diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 959983ae66d1..7933283b24d6 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -1,9 +1,14 @@ from __future__ import annotations -from typing import Iterable, Mapping, Sequence, TypeVar, cast +from typing import Iterable, Mapping, Sequence, TypeVar, cast, overload +from typing_extensions import Final +from mypy.nodes import ARG_POS, ARG_STAR, ArgKind, Var +from mypy.type_visitor import TypeTranslator from mypy.types import ( + ANY_STRATEGY, AnyType, + BoolTypeQuery, CallableType, DeletedType, ErasedType, @@ -13,7 +18,6 @@ NoneType, Overloaded, Parameters, - ParamSpecFlavor, ParamSpecType, PartialType, ProperType, @@ -21,7 +25,6 @@ Type, TypeAliasType, TypedDictType, - TypeList, TypeType, TypeVarId, TypeVarLikeType, @@ -32,23 +35,54 @@ UninhabitedType, UnionType, UnpackType, + expand_param_spec, + flatten_nested_unions, get_proper_type, + remove_trivial, ) -from mypy.typevartuples import split_with_instance, split_with_prefix_and_suffix +from mypy.typevartuples import ( + find_unpack_in_list, + split_with_instance, + split_with_prefix_and_suffix, +) + + +@overload +def expand_type( + typ: ProperType, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> ProperType: + ... + +@overload +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... +) -> Type: + ... -def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: + +def expand_type( + typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = False +) -> Type: """Substitute any type variable references in a type given by a type environment. """ - # TODO: use an overloaded signature? (ProperType stays proper after expansion.) - return typ.accept(ExpandTypeVisitor(env)) + return typ.accept(ExpandTypeVisitor(env, allow_erased_callables)) + + +@overload +def expand_type_by_instance(typ: ProperType, instance: Instance) -> ProperType: + ... + + +@overload +def expand_type_by_instance(typ: Type, instance: Instance) -> Type: + ... def expand_type_by_instance(typ: Type, instance: Instance) -> Type: """Substitute type variables in type using values from an Instance. Type variables are considered to be bound by the class declaration.""" - # TODO: use an overloaded signature? (ProperType stays proper after expansion.) 
if not instance.args: return typ else: @@ -63,7 +97,9 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type: instance.type.type_var_tuple_prefix, instance.type.type_var_tuple_suffix, ) - variables = {tvars_middle[0].id: TypeList(list(args_middle))} + tvar = tvars_middle[0] + assert isinstance(tvar, TypeVarTupleType) + variables = {tvar.id: TupleType(list(args_middle), tvar.tuple_fallback)} instance_args = args_prefix + args_suffix tvars = tvars_prefix + tvars_suffix else: @@ -87,7 +123,6 @@ def freshen_function_type_vars(callee: F) -> F: tvs = [] tvmap: dict[TypeVarId, Type] = {} for v in callee.variables: - # TODO(PEP612): fix for ParamSpecType if isinstance(v, TypeVarType): tv: TypeVarLikeType = TypeVarType.new_unification_variable(v) elif isinstance(v, TypeVarTupleType): @@ -106,13 +141,53 @@ def freshen_function_type_vars(callee: F) -> F: return cast(F, fresh_overload) +class HasGenericCallable(BoolTypeQuery): + def __init__(self) -> None: + super().__init__(ANY_STRATEGY) + + def visit_callable_type(self, t: CallableType) -> bool: + return t.is_generic() or super().visit_callable_type(t) + + +# Share a singleton since this is performance sensitive +has_generic_callable: Final = HasGenericCallable() + + +T = TypeVar("T", bound=Type) + + +def freshen_all_functions_type_vars(t: T) -> T: + result: Type + has_generic_callable.reset() + if not t.accept(has_generic_callable): + return t # Fast path to avoid expensive freshening + else: + result = t.accept(FreshenCallableVisitor()) + assert isinstance(result, type(t)) + return result + + +class FreshenCallableVisitor(TypeTranslator): + def visit_callable_type(self, t: CallableType) -> Type: + result = super().visit_callable_type(t) + assert isinstance(result, ProperType) and isinstance(result, CallableType) + return freshen_function_type_vars(result) + + def visit_type_alias_type(self, t: TypeAliasType) -> Type: + # Same as for ExpandTypeVisitor + return t.copy_modified(args=[arg.accept(self) for arg in t.args]) + + class ExpandTypeVisitor(TypeVisitor[Type]): """Visitor that substitutes type variables with values.""" variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value - def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: + def __init__( + self, variables: Mapping[TypeVarId, Type], allow_erased_callables: bool = False + ) -> None: self.variables = variables + self.allow_erased_callables = allow_erased_callables def visit_unbound_type(self, t: UnboundType) -> Type: return t @@ -130,17 +205,27 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: - # Should not get here. - raise RuntimeError() + if not self.allow_erased_callables: + raise RuntimeError() + # This may happen during type inference if some function argument + # type is a generic callable, and its erased form will appear in inferred + # constraints, then solver may check subtyping between them, which will trigger + # unify_generic_callables(), this is why we can get here. In all other cases it + # is a sign of a bug, since should never appear in any stored types. 
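Editor's note: as a reading aid for the ExpandTypeVisitor changes, a toy analogue of type-variable substitution over a miniature type tree (the dataclasses are invented for illustration; mypy's real visitor handles far more cases):

```python
from __future__ import annotations

from dataclasses import dataclass

@dataclass(frozen=True)
class TypeVarRef:
    id: int

@dataclass(frozen=True)
class ListOf:
    item: object

def expand(t: object, env: dict[int, object]) -> object:
    if isinstance(t, TypeVarRef):
        return env.get(t.id, t)           # unbound variables are left alone
    if isinstance(t, ListOf):
        return ListOf(expand(t.item, env))
    return t                              # leaf types pass through unchanged

print(expand(ListOf(TypeVarRef(1)), {1: "builtins.int"}))  # ListOf(item='builtins.int')
```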
+ return t def visit_instance(self, t: Instance) -> Type: args = self.expand_types_with_unpack(list(t.args)) if isinstance(args, list): - return Instance(t.type, args, t.line, t.column) + return t.copy_modified(args=args) else: return args def visit_type_var(self, t: TypeVarType) -> Type: + # Normally upper bounds can't contain other type variables, the only exception is + # special type variable Self`0 <: C[T, S], where C is the class where Self is used. + if t.id.raw_id == 0: + t = t.copy_modified(upper_bound=t.upper_bound.accept(self)) repl = self.variables.get(t.id, t) if isinstance(repl, ProperType) and isinstance(repl, Instance): # TODO: do we really need to do this? @@ -154,32 +239,8 @@ def visit_param_spec(self, t: ParamSpecType) -> Type: # TODO: what does prefix mean in this case? # TODO: why does this case even happen? Instances aren't plural. return repl - elif isinstance(repl, ParamSpecType): - return repl.copy_modified( - flavor=t.flavor, - prefix=t.prefix.copy_modified( - arg_types=t.prefix.arg_types + repl.prefix.arg_types, - arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, - arg_names=t.prefix.arg_names + repl.prefix.arg_names, - ), - ) - elif isinstance(repl, Parameters) or isinstance(repl, CallableType): - # if the paramspec is *P.args or **P.kwargs: - if t.flavor != ParamSpecFlavor.BARE: - assert isinstance(repl, CallableType), "Should not be able to get here." - # Is this always the right thing to do? - param_spec = repl.param_spec() - if param_spec: - return param_spec.with_flavor(t.flavor) - else: - return repl - else: - return Parameters( - t.prefix.arg_types + repl.arg_types, - t.prefix.arg_kinds + repl.arg_kinds, - t.prefix.arg_names + repl.arg_names, - variables=[*t.prefix.variables, *repl.variables], - ) + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(t, repl) else: # TODO: should this branch be removed? better not to fail silently return repl @@ -196,35 +257,95 @@ def visit_unpack_type(self, t: UnpackType) -> Type: assert False, "Mypy bug: unpacking must happen at a higher level" def expand_unpack(self, t: UnpackType) -> list[Type] | Instance | AnyType | None: - """May return either a list of types to unpack to, any, or a single - variable length tuple. The latter may not be valid in all contexts. - """ - if isinstance(t.type, TypeVarTupleType): - repl = get_proper_type(self.variables.get(t.type.id, t)) - if isinstance(repl, TupleType): - return repl.items - if isinstance(repl, TypeList): - return repl.items - elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple": - return repl - elif isinstance(repl, AnyType): - # tuple[Any, ...] would be better, but we don't have - # the type info to construct that type here. 
- return repl - elif isinstance(repl, TypeVarTupleType): - return [UnpackType(typ=repl)] - elif isinstance(repl, UnpackType): - return [repl] - elif isinstance(repl, UninhabitedType): - return None - else: - raise NotImplementedError(f"Invalid type replacement to expand: {repl}") - else: - raise NotImplementedError(f"Invalid type to expand: {t.type}") + return expand_unpack_with_variables(t, self.variables) def visit_parameters(self, t: Parameters) -> Type: return t.copy_modified(arg_types=self.expand_types(t.arg_types)) + def interpolate_args_for_unpack( + self, t: CallableType, var_arg: UnpackType + ) -> tuple[list[str | None], list[ArgKind], list[Type]]: + star_index = t.arg_kinds.index(ARG_STAR) + + # We have something like Unpack[Tuple[X1, X2, Unpack[Ts], Y1, Y2]] + if isinstance(get_proper_type(var_arg.type), TupleType): + expanded_tuple = get_proper_type(var_arg.type.accept(self)) + # TODO: handle the case that expanded_tuple is a variable length tuple. + assert isinstance(expanded_tuple, TupleType) + expanded_items = expanded_tuple.items + else: + expanded_items_res = self.expand_unpack(var_arg) + if isinstance(expanded_items_res, list): + expanded_items = expanded_items_res + elif ( + isinstance(expanded_items_res, Instance) + and expanded_items_res.type.fullname == "builtins.tuple" + ): + # TODO: We shouldnt't simply treat this as a *arg because of suffix handling + # (there cannot be positional args after a *arg) + arg_types = ( + t.arg_types[:star_index] + + [expanded_items_res.args[0]] + + t.arg_types[star_index + 1 :] + ) + return (t.arg_names, t.arg_kinds, arg_types) + else: + return (t.arg_names, t.arg_kinds, t.arg_types) + + expanded_unpack_index = find_unpack_in_list(expanded_items) + # This is the case where we just have Unpack[Tuple[X1, X2, X3]] + # (for example if either the tuple had no unpacks, or the unpack in the + # tuple got fully expanded to something with fixed length) + if expanded_unpack_index is None: + arg_names = ( + t.arg_names[:star_index] + + [None] * len(expanded_items) + + t.arg_names[star_index + 1 :] + ) + arg_kinds = ( + t.arg_kinds[:star_index] + + [ARG_POS] * len(expanded_items) + + t.arg_kinds[star_index + 1 :] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + else: + # If Unpack[Ts] simplest form still has an unpack or is a + # homogenous tuple, then only the prefix can be represented as + # positional arguments, and we pass Tuple[Unpack[Ts-1], Y1, Y2] + # as the star arg, for example. + expanded_unpack = get_proper_type(expanded_items[expanded_unpack_index]) + assert isinstance(expanded_unpack, UnpackType) + + # Extract the typevartuple so we can get a tuple fallback from it. + expanded_unpacked_tvt = get_proper_type(expanded_unpack.type) + assert isinstance(expanded_unpacked_tvt, TypeVarTupleType) + + prefix_len = expanded_unpack_index + arg_names = t.arg_names[:star_index] + [None] * prefix_len + t.arg_names[star_index:] + arg_kinds = ( + t.arg_kinds[:star_index] + [ARG_POS] * prefix_len + t.arg_kinds[star_index:] + ) + arg_types = ( + self.expand_types(t.arg_types[:star_index]) + + expanded_items[:prefix_len] + # Constructing the Unpack containing the tuple without the prefix. 
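Editor's note: the interpolation above is easier to picture from the user side: once Ts is fixed, `*args: Unpack[Ts]` behaves like a sequence of ordinary positional parameters. A hedged sketch (Python 3.11+ or typing_extensions):

```python
from __future__ import annotations

from typing import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")

def as_tuple(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
    return args

# With Ts = (int, str) the callable is effectively (int, str) -> tuple[int, str];
# interpolate_args_for_unpack computes that expansion of arg kinds and types.
print(as_tuple(1, "two"))  # (1, 'two')
```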
+ + [ + UnpackType( + TupleType( + expanded_items[prefix_len:], expanded_unpacked_tvt.tuple_fallback + ) + ) + if len(expanded_items) - prefix_len > 1 + else expanded_items[0] + ] + + self.expand_types(t.arg_types[star_index + 1 :]) + ) + return (arg_names, arg_kinds, arg_types) + def visit_callable_type(self, t: CallableType) -> Type: param_spec = t.param_spec() if param_spec is not None: @@ -250,8 +371,18 @@ def visit_callable_type(self, t: CallableType) -> Type: type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) + var_arg = t.var_arg() + if var_arg is not None and isinstance(var_arg.typ, UnpackType): + arg_names, arg_kinds, arg_types = self.interpolate_args_for_unpack(t, var_arg.typ) + else: + arg_names = t.arg_names + arg_kinds = t.arg_kinds + arg_types = self.expand_types(t.arg_types) + return t.copy_modified( - arg_types=self.expand_types(t.arg_types), + arg_types=arg_types, + arg_names=arg_names, + arg_kinds=arg_kinds, ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) @@ -318,11 +449,13 @@ def visit_literal_type(self, t: LiteralType) -> Type: return t def visit_union_type(self, t: UnionType) -> Type: - # After substituting for type variables in t.items, - # some of the resulting types might be subtypes of others. - from mypy.typeops import make_simplified_union # asdf - - return make_simplified_union(self.expand_types(t.items), t.line, t.column) + expanded = self.expand_types(t.items) + # After substituting for type variables in t.items, some resulting types + # might be subtypes of others, however calling make_simplified_union() + # can cause recursion, so we just remove strict duplicates. + return UnionType.make_union( + remove_trivial(flatten_nested_unions(expanded)), t.line, t.column + ) def visit_partial_type(self, t: PartialType) -> Type: return t @@ -335,8 +468,8 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(item) def visit_type_alias_type(self, t: TypeAliasType) -> Type: - # Target of the type alias cannot contain type variables, - # so we just expand the arguments. + # Target of the type alias cannot contain type variables (not bound by the type + # alias itself), so we just expand the arguments. return t.copy_modified(args=self.expand_types(t.args)) def expand_types(self, types: Iterable[Type]) -> list[Type]: @@ -344,3 +477,48 @@ def expand_types(self, types: Iterable[Type]) -> list[Type]: for t in types: a.append(t.accept(self)) return a + + +def expand_unpack_with_variables( + t: UnpackType, variables: Mapping[TypeVarId, Type] +) -> list[Type] | Instance | AnyType | None: + """May return either a list of types to unpack to, any, or a single + variable length tuple. The latter may not be valid in all contexts. + """ + if isinstance(t.type, TypeVarTupleType): + repl = get_proper_type(variables.get(t.type.id, t)) + if isinstance(repl, TupleType): + return repl.items + elif isinstance(repl, Instance) and repl.type.fullname == "builtins.tuple": + return repl + elif isinstance(repl, AnyType): + # tuple[Any, ...] would be better, but we don't have + # the type info to construct that type here. 
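Editor's note: the `visit_union_type` comment above says duplicates are removed without full simplification; an order-preserving duplicate filter of that kind looks like this (illustrative helper, not the `remove_trivial` used by the diff):

```python
from typing import Hashable, Iterable, TypeVar

T = TypeVar("T", bound=Hashable)

def remove_strict_duplicates(items: Iterable[T]) -> list[T]:
    seen: set[T] = set()
    out: list[T] = []
    for item in items:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out

print(remove_strict_duplicates(["int", "str", "int", "None"]))  # ['int', 'str', 'None']
```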
+ return repl + elif isinstance(repl, TypeVarTupleType): + return [UnpackType(typ=repl)] + elif isinstance(repl, UnpackType): + return [repl] + elif isinstance(repl, UninhabitedType): + return None + else: + raise NotImplementedError(f"Invalid type replacement to expand: {repl}") + else: + raise NotImplementedError(f"Invalid type to expand: {t.type}") + + +@overload +def expand_self_type(var: Var, typ: ProperType, replacement: ProperType) -> ProperType: + ... + + +@overload +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + ... + + +def expand_self_type(var: Var, typ: Type, replacement: Type) -> Type: + """Expand appearances of Self type in a variable type.""" + if var.info.self_type is not None and not var.is_property: + return expand_type(typ, {var.info.self_type.id: replacement}) + return typ diff --git a/mypy/fastparse.py b/mypy/fastparse.py index 2f749af6a467..ef1fdf61af2e 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -212,6 +212,10 @@ def ast3_parse( MatchAs = Any MatchOr = Any AstNode = Union[ast3.expr, ast3.stmt, ast3.ExceptHandler] + if sys.version_info >= (3, 11): + TryStar = ast3.TryStar + else: + TryStar = Any except ImportError: try: from typed_ast import ast35 # type: ignore[attr-defined] # noqa: F401 @@ -258,15 +262,17 @@ def parse( on failure. Otherwise, use the errors object to report parse errors. """ raise_on_error = False - if errors is None: - errors = Errors() - raise_on_error = True if options is None: options = Options() - errors.set_file(fnam, module) + if errors is None: + errors = Errors(hide_error_codes=options.hide_error_codes) + raise_on_error = True + errors.set_file(fnam, module, options=options) is_stub_file = fnam.endswith(".pyi") if is_stub_file: feature_version = defaults.PYTHON3_VERSION[1] + if options.python_version[0] == 3 and options.python_version[1] > feature_version: + feature_version = options.python_version[1] else: assert options.python_version[0] >= 3 feature_version = options.python_version[1] @@ -303,6 +309,7 @@ def parse( if raise_on_error and errors.is_errors(): errors.raise_error() + assert isinstance(tree, MypyFile) return tree @@ -480,6 +487,16 @@ def translate_stmt_list( and self.type_ignores and min(self.type_ignores) < self.get_lineno(stmts[0]) ): + if self.type_ignores[min(self.type_ignores)]: + self.fail( + ( + "type ignore with error code is not supported for modules; " + "use `# mypy: disable-error-code=...`" + ), + line=min(self.type_ignores), + column=0, + blocker=False, + ) self.errors.used_ignored_lines[self.errors.file][min(self.type_ignores)].append( codes.FILE.code ) @@ -731,7 +748,7 @@ def _check_ifstmt_for_overloads( if stmt.else_body is None: return overload_name - if isinstance(stmt.else_body, Block) and len(stmt.else_body.body) == 1: + if len(stmt.else_body.body) == 1: # For elif: else_body contains an IfStmt itself -> do a recursive check. 
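Editor's note: the `TryStar` alias above (and the `visit_TryStar` handler further below) cover Python 3.11 `except*` syntax. A minimal example of source this lets the parser accept; it is not from the diff and runs only on 3.11+:

```python
def risky(n: int) -> None:
    if n % 2:
        raise ValueError(n)
    raise OSError(n)

try:
    risky(3)
except* ValueError as eg:
    # A naked ValueError is wrapped in an ExceptionGroup for except* matching.
    print("value errors:", [str(e) for e in eg.exceptions])
except* OSError as eg:
    print("os errors:", [str(e) for e in eg.exceptions])
```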
if ( isinstance(stmt.else_body.body[0], (Decorator, FuncDef, OverloadedFuncDef)) @@ -828,7 +845,7 @@ def visit_Module(self, mod: ast3.Module) -> MypyFile: if parsed is not None: self.type_ignores[ti.lineno] = parsed else: - self.fail(INVALID_TYPE_IGNORE, ti.lineno, -1) + self.fail(INVALID_TYPE_IGNORE, ti.lineno, -1, blocker=False) body = self.fix_function_overloads(self.translate_stmt_list(mod.body, ismodule=True)) return MypyFile(body, self.imports, False, self.type_ignores) @@ -888,13 +905,11 @@ def do_func_def( # PEP 484 disallows both type annotations and type comments if n.returns or any(a.type_annotation is not None for a in args): self.fail(message_registry.DUPLICATE_TYPE_SIGNATURES, lineno, n.col_offset) - translated_args = TypeConverter( + translated_args: list[Type] = TypeConverter( self.errors, line=lineno, override_column=n.col_offset ).translate_expr_list(func_type_ast.argtypes) - arg_types = [ - a if a is not None else AnyType(TypeOfAny.unannotated) - for a in translated_args - ] + # Use a cast to work around `list` invariance + arg_types = cast(List[Optional[Type]], translated_args) return_type = TypeConverter(self.errors, line=lineno).visit(func_type_ast.returns) # add implicit self type @@ -923,7 +938,7 @@ def do_func_def( if any(arg_types) or return_type: if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types): self.fail( - "Ellipses cannot accompany other argument types " "in function type signature", + "Ellipses cannot accompany other argument types in function type signature", lineno, n.col_offset, ) @@ -992,7 +1007,7 @@ def do_func_def( return retval def set_type_optional(self, type: Type | None, initializer: Expression | None) -> None: - if self.options.no_implicit_optional: + if not self.options.implicit_optional: return # Indicate that type should be wrapped in an Optional if arg is initialized to None. optional = isinstance(initializer, NameExpr) and initializer.name == "None" @@ -1066,7 +1081,14 @@ def make_argument( if argument_elide_name(arg.arg): pos_only = True - return Argument(Var(arg.arg), arg_type, self.visit(default), kind, pos_only) + argument = Argument(Var(arg.arg), arg_type, self.visit(default), kind, pos_only) + argument.set_line( + arg.lineno, + arg.col_offset, + getattr(arg, "end_lineno", None), + getattr(arg, "end_col_offset", None), + ) + return argument def fail_arg(self, msg: str, arg: ast3.arg) -> None: self.fail(msg, arg.lineno, arg.col_offset) @@ -1238,6 +1260,24 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: ) return self.set_line(node, n) + def visit_TryStar(self, n: TryStar) -> TryStmt: + vs = [ + self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers + ] + types = [self.visit(h.type) for h in n.handlers] + handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] + + node = TryStmt( + self.as_required_block(n.body, n.lineno), + vs, + types, + handlers, + self.as_block(n.orelse, n.lineno), + self.as_block(n.finalbody, n.lineno), + ) + node.is_star = True + return self.set_line(node, n) + # Assert(expr test, expr? 
msg) def visit_Assert(self, n: ast3.Assert) -> AssertStmt: node = AssertStmt(self.visit(n.test), self.visit(n.msg)) @@ -1630,7 +1670,9 @@ def visit_ExtSlice(self, n: ast3.ExtSlice) -> TupleExpr: # Index(expr value) def visit_Index(self, n: Index) -> Node: # cast for mypyc's benefit on Python 3.9 - return self.visit(cast(Any, n).value) + value = self.visit(cast(Any, n).value) + assert isinstance(value, Node) + return value # Match(expr subject, match_case* cases) # python 3.10 and later def visit_Match(self, n: Match) -> MatchStmt: @@ -1760,7 +1802,9 @@ def visit(self, node: AST | None) -> ProperType | None: method = "visit_" + node.__class__.__name__ visitor = getattr(self, method, None) if visitor is not None: - return visitor(node) + typ = visitor(node) + assert isinstance(typ, ProperType) + return typ else: return self.invalid_type(node) finally: @@ -1947,7 +1991,9 @@ def visit_Bytes(self, n: Bytes) -> Type: def visit_Index(self, n: ast3.Index) -> Type: # cast for mypyc's benefit on Python 3.9 - return self.visit(cast(Any, n).value) + value = self.visit(cast(Any, n).value) + assert isinstance(value, Type) + return value def visit_Slice(self, n: ast3.Slice) -> Type: return self.invalid_type(n, note="did you mean to use ',' instead of ':' ?") @@ -1971,9 +2017,10 @@ def visit_Subscript(self, n: ast3.Subscript) -> Type: for s in dims: if getattr(s, "col_offset", None) is None: if isinstance(s, ast3.Index): - s.col_offset = s.value.col_offset # type: ignore + s.col_offset = s.value.col_offset # type: ignore[attr-defined] elif isinstance(s, ast3.Slice): - s.col_offset = s.lower.col_offset # type: ignore + assert s.lower is not None + s.col_offset = s.lower.col_offset # type: ignore[attr-defined] sliceval = ast3.Tuple(dims, n.ctx) empty_tuple_index = False diff --git a/mypy/fixup.py b/mypy/fixup.py index 7f7c3129005c..7b0f5f433d72 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -13,10 +13,12 @@ FuncDef, MypyFile, OverloadedFuncDef, + ParamSpecExpr, SymbolTable, TypeAlias, TypeInfo, TypeVarExpr, + TypeVarTupleExpr, Var, ) from mypy.types import ( @@ -78,13 +80,24 @@ def visit_type_info(self, info: TypeInfo) -> None: if info.tuple_type: info.tuple_type.accept(self.type_fixer) info.update_tuple_type(info.tuple_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) if info.typeddict_type: info.typeddict_type.accept(self.type_fixer) info.update_typeddict_type(info.typeddict_type) + if info.special_alias: + info.special_alias.alias_tvars = list(info.defn.type_vars) if info.declared_metaclass: info.declared_metaclass.accept(self.type_fixer) if info.metaclass_type: info.metaclass_type.accept(self.type_fixer) + if info.alt_promote: + info.alt_promote.accept(self.type_fixer) + instance = Instance(info, []) + # Hack: We may also need to add a backwards promotion (from int to native int), + # since it might not be serialized. 
+ if instance not in info.alt_promote.type._promote: + info.alt_promote.type._promote.append(instance) if info._mro_refs: info.mro = [ lookup_fully_qualified_typeinfo( @@ -164,6 +177,12 @@ def visit_type_var_expr(self, tv: TypeVarExpr) -> None: value.accept(self.type_fixer) tv.upper_bound.accept(self.type_fixer) + def visit_paramspec_expr(self, p: ParamSpecExpr) -> None: + p.upper_bound.accept(self.type_fixer) + + def visit_type_var_tuple_expr(self, tv: TypeVarTupleExpr) -> None: + tv.upper_bound.accept(self.type_fixer) + def visit_var(self, v: Var) -> None: if self.current_info is not None: v.info = self.current_info @@ -172,6 +191,8 @@ def visit_var(self, v: Var) -> None: def visit_type_alias(self, a: TypeAlias) -> None: a.target.accept(self.type_fixer) + for v in a.alias_tvars: + v.accept(self.type_fixer) class TypeFixer(TypeVisitor[None]): diff --git a/mypy/ipc.py b/mypy/ipc.py index 8e693169ab36..f07616df0fd0 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -89,9 +89,6 @@ def write(self, data: bytes) -> None: if sys.platform == "win32": try: ov, err = _winapi.WriteFile(self.connection, data, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) - assert isinstance(err, int) try: if err == _winapi.ERROR_IO_PENDING: timeout = int(self.timeout * 1000) if self.timeout else _winapi.INFINITE @@ -217,8 +214,6 @@ def __enter__(self) -> IPCServer: # client never connects, though this can be "solved" by killing the server try: ov = _winapi.ConnectNamedPipe(self.connection, overlapped=True) - # TODO: remove once typeshed supports Literal types - assert isinstance(ov, _winapi.Overlapped) except OSError as e: # Don't raise if the client already exists, or the client already connected if e.winerror not in (_winapi.ERROR_PIPE_CONNECTED, _winapi.ERROR_NO_DATA): @@ -252,7 +247,7 @@ def __exit__( # Wait for the client to finish reading the last write before disconnecting if not FlushFileBuffers(self.connection): raise IPCException( - "Failed to flush NamedPipe buffer," "maybe the client hung up?" + "Failed to flush NamedPipe buffer, maybe the client hung up?" ) finally: DisconnectNamedPipe(self.connection) @@ -270,4 +265,6 @@ def connection_name(self) -> str: if sys.platform == "win32": return self.name else: - return self.sock.getsockname() + name = self.sock.getsockname() + assert isinstance(name, str) + return name diff --git a/mypy/join.py b/mypy/join.py index 123488c54ef6..62d256f4440f 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -2,11 +2,14 @@ from __future__ import annotations +from typing import overload + import mypy.typeops from mypy.maptype import map_instance_to_supertype from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT from mypy.state import state from mypy.subtypes import ( + SubtypeContext, find_member, is_equivalent, is_proper_subtype, @@ -99,7 +102,9 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: assert new_type is not None args.append(new_type) result: ProperType = Instance(t.type, args) - elif t.type.bases and is_subtype(t, s, ignore_type_params=True): + elif t.type.bases and is_proper_subtype( + t, s, subtype_context=SubtypeContext(ignore_type_params=True) + ): result = self.join_instances_via_supertype(t, s) else: # Now t is not a subtype of s, and t != s. 
Now s could be a subtype @@ -131,7 +136,6 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: best = res assert best is not None for promote in t.type._promote: - promote = get_proper_type(promote) if isinstance(promote, Instance): res = self.join_instances(promote, s) if is_better(res, best): @@ -140,8 +144,11 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: - """Return a simple least upper bound given the declared type.""" - # TODO: check infinite recursion for aliases here. + """Return a simple least upper bound given the declared type. + + This function should be only used by binder, and should not recurse. + For all other uses, use `join_types()`. + """ declaration = get_proper_type(declaration) s = get_proper_type(s) t = get_proper_type(t) @@ -157,10 +164,10 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: if isinstance(s, ErasedType): return t - if is_proper_subtype(s, t): + if is_proper_subtype(s, t, ignore_promotions=True): return t - if is_proper_subtype(t, s): + if is_proper_subtype(t, s, ignore_promotions=True): return s if isinstance(declaration, UnionType): @@ -172,6 +179,12 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: if isinstance(s, UninhabitedType) and not isinstance(t, UninhabitedType): s, t = t, s + # Meets/joins require callable type normalization. + s, t = normalize_callables(s, t) + + if isinstance(s, UnionType) and not isinstance(t, UnionType): + s, t = t, s + value = t.accept(TypeJoinVisitor(s)) if declaration is None or is_subtype(value, declaration): return value @@ -179,17 +192,29 @@ def join_simple(declaration: Type | None, s: Type, t: Type) -> ProperType: return declaration -def trivial_join(s: Type, t: Type) -> ProperType: +def trivial_join(s: Type, t: Type) -> Type: """Return one of types (expanded) if it is a supertype of other, otherwise top type.""" if is_subtype(s, t): - return get_proper_type(t) + return t elif is_subtype(t, s): - return get_proper_type(s) + return s else: return object_or_any_from_type(get_proper_type(t)) -def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) -> ProperType: +@overload +def join_types( + s: ProperType, t: ProperType, instance_joiner: InstanceJoiner | None = None +) -> ProperType: + ... + + +@overload +def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) -> Type: + ... + + +def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) -> Type: """Return the least upper bound of s and t. For example, the join of 'int' and 'object' is 'object'. @@ -229,6 +254,9 @@ def join_types(s: Type, t: Type, instance_joiner: InstanceJoiner | None = None) elif isinstance(t, PlaceholderType): return AnyType(TypeOfAny.from_error) + # Meets/joins require callable type normalization. + s, t = normalize_callables(s, t) + # Use a visitor to handle non-trivial cases. 
return t.accept(TypeJoinVisitor(s, instance_joiner)) @@ -437,7 +465,7 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: if self.s.length() == t.length(): items: list[Type] = [] for i in range(t.length()): - items.append(self.join(t.items[i], self.s.items[i])) + items.append(join_types(t.items[i], self.s.items[i])) return TupleType(items, fallback) else: return fallback @@ -481,7 +509,7 @@ def visit_partial_type(self, t: PartialType) -> ProperType: def visit_type_type(self, t: TypeType) -> ProperType: if isinstance(self.s, TypeType): - return TypeType.make_normalized(self.join(t.item, self.s.item), line=t.line) + return TypeType.make_normalized(join_types(t.item, self.s.item), line=t.line) elif isinstance(self.s, Instance) and self.s.type.fullname == "builtins.type": return self.s else: @@ -490,9 +518,6 @@ def visit_type_type(self, t: TypeType) -> ProperType: def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: assert False, f"This should be never called, got {t}" - def join(self, s: Type, t: Type) -> ProperType: - return join_types(s, t) - def default(self, typ: Type) -> ProperType: typ = get_proper_type(typ) if isinstance(typ, Instance): @@ -528,6 +553,14 @@ def is_better(t: Type, s: Type) -> bool: return False +def normalize_callables(s: ProperType, t: ProperType) -> tuple[ProperType, ProperType]: + if isinstance(s, (CallableType, Overloaded)): + s = s.with_unpacked_kwargs() + if isinstance(t, (CallableType, Overloaded)): + t = t.with_unpacked_kwargs() + return s, t + + def is_similar_callables(t: CallableType, s: CallableType) -> bool: """Return True if t and s have identical numbers of arguments, default arguments and varargs. @@ -545,10 +578,10 @@ def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: arg_types: list[Type] = [] for i in range(len(t.arg_types)): arg_types.append(meet_types(t.arg_types[i], s.arg_types[i])) - # TODO in combine_similar_callables also applies here (names and kinds) - # The fallback type can be either 'function' or 'type'. The result should have 'type' as - # fallback only if both operands have it as 'type'. - if t.fallback.type.fullname != "builtins.type": + # TODO in combine_similar_callables also applies here (names and kinds; user metaclasses) + # The fallback type can be either 'function', 'type', or some user-provided metaclass. + # The result should always use 'function' as a fallback if either operands are using it. + if t.fallback.type.fullname == "builtins.function": fallback = t.fallback else: fallback = s.fallback @@ -566,9 +599,10 @@ def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: for i in range(len(t.arg_types)): arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) # TODO kinds and argument names - # The fallback type can be either 'function' or 'type'. The result should have 'type' as - # fallback only if both operands have it as 'type'. - if t.fallback.type.fullname != "builtins.type": + # TODO what should happen if one fallback is 'type' and the other is a user-provided metaclass? + # The fallback type can be either 'function', 'type', or some user-provided metaclass. + # The result should always use 'function' as a fallback if either operands are using it. 
+ if t.fallback.type.fullname == "builtins.function": fallback = t.fallback else: fallback = s.fallback @@ -639,19 +673,19 @@ def object_or_any_from_type(typ: ProperType) -> ProperType: return AnyType(TypeOfAny.implementation_artifact) -def join_type_list(types: list[Type]) -> ProperType: +def join_type_list(types: list[Type]) -> Type: if not types: # This is a little arbitrary but reasonable. Any empty tuple should be compatible # with all variable length tuples, and this makes it possible. return UninhabitedType() - joined = get_proper_type(types[0]) + joined = types[0] for t in types[1:]: joined = join_types(joined, t) return joined -def unpack_callback_protocol(t: Instance) -> Type | None: +def unpack_callback_protocol(t: Instance) -> ProperType | None: assert t.type.is_protocol if t.type.protocol_members == ["__call__"]: - return find_member("__call__", t, t, is_operator=True) + return get_proper_type(find_member("__call__", t, t, is_operator=True)) return None diff --git a/mypy/literals.py b/mypy/literals.py index 43425755aae8..9d91cf728b06 100644 --- a/mypy/literals.py +++ b/mypy/literals.py @@ -173,7 +173,7 @@ def visit_op_expr(self, e: OpExpr) -> Key: return ("Binary", e.op, literal_hash(e.left), literal_hash(e.right)) def visit_comparison_expr(self, e: ComparisonExpr) -> Key: - rest: Any = tuple(e.operators) + rest: tuple[str | Key | None, ...] = tuple(e.operators) rest += tuple(literal_hash(o) for o in e.operands) return ("Comparison",) + rest @@ -182,7 +182,7 @@ def visit_unary_expr(self, e: UnaryExpr) -> Key: def seq_expr(self, e: ListExpr | TupleExpr | SetExpr, name: str) -> Key | None: if all(literal(x) == LITERAL_YES for x in e.items): - rest: Any = tuple(literal_hash(x) for x in e.items) + rest: tuple[Key | None, ...] = tuple(literal_hash(x) for x in e.items) return (name,) + rest return None @@ -191,7 +191,7 @@ def visit_list_expr(self, e: ListExpr) -> Key | None: def visit_dict_expr(self, e: DictExpr) -> Key | None: if all(a and literal(a) == literal(b) == LITERAL_YES for a, b in e.items): - rest: Any = tuple( + rest: tuple[Key | None, ...] = tuple( (literal_hash(a) if a else None, literal_hash(b)) for a, b in e.items ) return ("Dict",) + rest diff --git a/mypy/main.py b/mypy/main.py index 7388e9a375ff..47dea2ae9797 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -18,7 +18,7 @@ from mypy.find_sources import InvalidSourceList, create_source_list from mypy.fscache import FileSystemCache from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths, get_search_dirs, mypy_path -from mypy.options import BuildType, Options +from mypy.options import INCOMPLETE_FEATURES, BuildType, Options from mypy.split_namespace import SplitNamespace from mypy.version import __version__ @@ -67,7 +67,7 @@ def main( if clean_exit: options.fast_exit = False - formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes) + formatter = util.FancyFormatter(stdout, stderr, options.hide_error_codes) if options.install_types and (stdout is not sys.stdout or stderr is not sys.stderr): # Since --install-types performs user input, we want regular stdout and stderr. 
@@ -110,10 +110,10 @@ def main( print_memory_profile() code = 0 - if messages: + n_errors, n_notes, n_files = util.count_stats(messages) + if messages and n_notes < len(messages): code = 2 if blockers else 1 if options.error_summary: - n_errors, n_notes, n_files = util.count_stats(messages) if n_errors: summary = formatter.format_error( n_errors, n_files, len(sources), blockers=blockers, use_color=options.color_output @@ -151,7 +151,7 @@ def run_build( stdout: TextIO, stderr: TextIO, ) -> tuple[build.BuildResult | None, list[str], bool]: - formatter = util.FancyFormatter(stdout, stderr, options.show_error_codes) + formatter = util.FancyFormatter(stdout, stderr, options.hide_error_codes) messages = [] @@ -544,8 +544,9 @@ def add_invertible_flag( title="Import discovery", description="Configure how imports are discovered and followed." ) add_invertible_flag( - "--namespace-packages", - default=False, + "--no-namespace-packages", + dest="namespace_packages", + default=True, help="Support namespace packages (PEP 420, __init__.py-less)", group=imports_group, ) @@ -657,7 +658,7 @@ def add_invertible_flag( "--disallow-any-generics", default=False, strict_flag=True, - help="Disallow usage of generic types that do not specify explicit type " "parameters", + help="Disallow usage of generic types that do not specify explicit type parameters", group=disallow_any_group, ) add_invertible_flag( @@ -720,10 +721,9 @@ def add_invertible_flag( "https://mypy.readthedocs.io/en/stable/kinds_of_types.html#no-strict-optional", ) add_invertible_flag( - "--no-implicit-optional", + "--implicit-optional", default=False, - strict_flag=True, - help="Don't assume arguments with default values of None are Optional", + help="Assume arguments with default values of None are Optional", group=none_group, ) none_group.add_argument("--strict-optional", action="store_true", help=argparse.SUPPRESS) @@ -733,9 +733,6 @@ def add_invertible_flag( dest="strict_optional", help="Disable strict Optional checks (inverse: --strict-optional)", ) - none_group.add_argument( - "--strict-optional-whitelist", metavar="GLOB", nargs="*", help=argparse.SUPPRESS - ) lint_group = parser.add_argument_group( title="Configuring warnings", @@ -874,9 +871,9 @@ def add_invertible_flag( group=error_group, ) add_invertible_flag( - "--show-error-codes", + "--hide-error-codes", default=False, - help="Show error codes in error messages", + help="Hide error codes in error messages", group=error_group, ) add_invertible_flag( @@ -978,9 +975,19 @@ def add_invertible_flag( help="Use a custom typing module", ) internals_group.add_argument( - "--enable-recursive-aliases", + "--disable-recursive-aliases", action="store_true", - help="Experimental support for recursive type aliases", + help="Disable experimental support for recursive type aliases", + ) + # Deprecated reverse variant of the above. 
+ internals_group.add_argument( + "--enable-recursive-aliases", action="store_true", help=argparse.SUPPRESS + ) + parser.add_argument( + "--enable-incomplete-feature", + action="append", + metavar="FEATURE", + help="Enable support of incomplete/experimental features for early preview", ) internals_group.add_argument( "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" @@ -1001,7 +1008,15 @@ def add_invertible_flag( help="When encountering SOURCE_FILE, read and type check " "the contents of SHADOW_FILE instead.", ) - add_invertible_flag("--fast-exit", default=True, help=argparse.SUPPRESS, group=internals_group) + internals_group.add_argument("--fast-exit", action="store_true", help=argparse.SUPPRESS) + internals_group.add_argument( + "--no-fast-exit", action="store_false", dest="fast_exit", help=argparse.SUPPRESS + ) + # This flag is useful for mypy tests, where function bodies may be omitted. Plugin developers + # may want to use this as well in their tests. + add_invertible_flag( + "--allow-empty-bodies", default=False, help=argparse.SUPPRESS, group=internals_group + ) report_group = parser.add_argument_group( title="Report generation", description="Generate a report in the specified format." @@ -1070,8 +1085,14 @@ def add_invertible_flag( "--inferstats", action="store_true", dest="dump_inference_stats", help=argparse.SUPPRESS ) parser.add_argument("--dump-build-stats", action="store_true", help=argparse.SUPPRESS) - # dump timing stats for each processed file into the given output file + # Dump timing stats for each processed file into the given output file parser.add_argument("--timing-stats", dest="timing_stats", help=argparse.SUPPRESS) + # Dump per line type checking timing stats for each processed file into the given + # output file. Only total time spent in each top level expression will be shown. + # Times are shown in microseconds. + parser.add_argument( + "--line-checking-stats", dest="line_checking_stats", help=argparse.SUPPRESS + ) # --debug-cache will disable any cache-related compressions/optimizations, # which will make the cache writing process output pretty-printed JSON (which # is easier to debug). @@ -1105,9 +1126,19 @@ def add_invertible_flag( parser.add_argument( "--cache-map", nargs="+", dest="special-opts:cache_map", help=argparse.SUPPRESS ) + # --debug-serialize will run tree.serialize() even if cache generation is disabled. + # Useful for mypy_primer to detect serialize errors earlier. + parser.add_argument("--debug-serialize", action="store_true", help=argparse.SUPPRESS) + # This one is deprecated, but we will keep it for a few releases. parser.add_argument( "--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS ) + parser.add_argument( + "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS ) + parser.add_argument( + "--disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS ) # options specifying code to check code_group = parser.add_argument_group( @@ -1220,8 +1251,13 @@ def set_strict_flags() -> None: # Paths listed in the config file will be ignored if any paths, modules or packages # are passed on the command line.
- if options.files and not (special_opts.files or special_opts.packages or special_opts.modules): - special_opts.files = options.files + if not (special_opts.files or special_opts.packages or special_opts.modules): + if options.files: + special_opts.files = options.files + if options.packages: + special_opts.packages = options.packages + if options.modules: + special_opts.modules = options.modules # Check for invalid argument combinations. if require_targets: @@ -1267,10 +1303,22 @@ def set_strict_flags() -> None: # Enabling an error code always overrides disabling options.disabled_error_codes -= options.enabled_error_codes + # Validate incomplete features. + for feature in options.enable_incomplete_feature: + if feature not in INCOMPLETE_FEATURES: + parser.error(f"Unknown incomplete feature: {feature}") + if options.enable_incomplete_features: + print( + "Warning: --enable-incomplete-features is deprecated, use" + " --enable-incomplete-feature=FEATURE instead" + ) + options.enable_incomplete_feature = list(INCOMPLETE_FEATURES) + + # Compute absolute path for custom typeshed (if present). + if options.custom_typeshed_dir is not None: + options.abs_custom_typeshed_dir = os.path.abspath(options.custom_typeshed_dir) + # Set build flags. - if options.strict_optional_whitelist is not None: - # TODO: Deprecate, then kill this flag - options.strict_optional = True if special_opts.find_occurrences: state.find_occurrences = special_opts.find_occurrences.split(".") assert state.find_occurrences is not None @@ -1310,6 +1358,12 @@ def set_strict_flags() -> None: if options.logical_deps: options.cache_fine_grained = True + if options.enable_recursive_aliases: + print( + "Warning: --enable-recursive-aliases is deprecated;" + " recursive types are enabled by default" + ) + # Set target. if special_opts.modules + special_opts.packages: options.build_type = BuildType.MODULE diff --git a/mypy/maptype.py b/mypy/maptype.py index 2cec20a03189..cae904469fed 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -1,19 +1,8 @@ from __future__ import annotations -import mypy.typeops from mypy.expandtype import expand_type from mypy.nodes import TypeInfo -from mypy.types import ( - AnyType, - Instance, - ProperType, - TupleType, - Type, - TypeOfAny, - TypeVarId, - get_proper_type, - has_type_vars, -) +from mypy.types import AnyType, Instance, TupleType, Type, TypeOfAny, TypeVarId, has_type_vars def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance: @@ -37,8 +26,11 @@ def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Insta # Unfortunately we can't support this for generic recursive tuples. # If we skip this special casing we will fall back to tuple[Any, ...]. env = instance_to_type_environment(instance) - tuple_type = get_proper_type(expand_type(instance.type.tuple_type, env)) + tuple_type = expand_type(instance.type.tuple_type, env) if isinstance(tuple_type, TupleType): + # Make the import here to avoid cyclic imports. 
+ import mypy.typeops + return mypy.typeops.tuple_fallback(tuple_type) if not superclass.type_vars: @@ -101,7 +93,6 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) - if b.type == supertype: env = instance_to_type_environment(instance) t = expand_type(b, env) - assert isinstance(t, ProperType) assert isinstance(t, Instance) result.append(t) diff --git a/mypy/meet.py b/mypy/meet.py index 21637f57f233..3214b4b43975 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -6,9 +6,16 @@ from mypy.erasetype import erase_type from mypy.maptype import map_instance_to_supertype from mypy.state import state -from mypy.subtypes import is_callable_compatible, is_equivalent, is_proper_subtype, is_subtype +from mypy.subtypes import ( + is_callable_compatible, + is_equivalent, + is_proper_subtype, + is_same_type, + is_subtype, +) from mypy.typeops import is_recursive_pair, make_simplified_union, tuple_fallback from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, AnyType, CallableType, DeletedType, @@ -61,11 +68,25 @@ def meet_types(s: Type, t: Type) -> ProperType: """Return the greatest lower bound of two types.""" if is_recursive_pair(s, t): # This case can trigger an infinite recursion, general support for this will be - # tricky so we use a trivial meet (like for protocols). + # tricky, so we use a trivial meet (like for protocols). return trivial_meet(s, t) s = get_proper_type(s) t = get_proper_type(t) + if isinstance(s, Instance) and isinstance(t, Instance) and s.type == t.type: + # Code in checker.py should merge any extra_items where possible, so we + # should have only compatible extra_items here. We check this before + # the below subtype check, so that extra_attrs will not get erased. + if (s.extra_attrs or t.extra_attrs) and is_same_type(s, t): + if s.extra_attrs and t.extra_attrs: + if len(s.extra_attrs.attrs) > len(t.extra_attrs.attrs): + # Return the one that has more precise information. + return s + return t + if s.extra_attrs: + return s + return t + if not isinstance(s, UnboundType) and not isinstance(t, UnboundType): if is_proper_subtype(s, t, ignore_promotions=True): return s @@ -78,6 +99,10 @@ def meet_types(s: Type, t: Type) -> ProperType: return t if isinstance(s, UnionType) and not isinstance(t, UnionType): s, t = t, s + + # Meets/joins require callable type normalization. + s, t = join.normalize_callables(s, t) + return t.accept(TypeMeetVisitor(s)) @@ -97,7 +122,19 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: return original_declared if isinstance(declared, UnionType): return make_simplified_union( - [narrow_declared_type(x, narrowed) for x in declared.relevant_items()] + [ + narrow_declared_type(x, narrowed) + for x in declared.relevant_items() + # This (ugly) special-casing is needed to support checking + # branches like this: + # x: Union[float, complex] + # if isinstance(x, int): + # ... 
+ if ( + is_overlapping_types(x, narrowed, ignore_promotions=True) + or is_subtype(narrowed, x, ignore_promotions=False) + ) + ] ) if is_enum_overlapping_union(declared, narrowed): return original_narrowed @@ -127,6 +164,14 @@ def narrow_declared_type(declared: Type, narrowed: Type) -> Type: if declared.type.alt_promote: # Special case: low-level integer type can't be narrowed return original_declared + if ( + isinstance(narrowed, Instance) + and narrowed.type.alt_promote + and narrowed.type.alt_promote.type is declared.type + ): + # Special case: 'int' can't be narrowed down to a native int type such as + # i64, since they have different runtime representations. + return original_declared return meet_types(original_declared, original_narrowed) elif isinstance(declared, (TupleType, TypeType, LiteralType)): return meet_types(original_declared, original_narrowed) @@ -211,6 +256,7 @@ def is_overlapping_types( right: Type, ignore_promotions: bool = False, prohibit_none_typevar_overlap: bool = False, + ignore_uninhabited: bool = False, ) -> bool: """Can a value of type 'left' also be of type 'right' or vice-versa? @@ -235,6 +281,7 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: right, ignore_promotions=ignore_promotions, prohibit_none_typevar_overlap=prohibit_none_typevar_overlap, + ignore_uninhabited=ignore_uninhabited, ) # We should never encounter this type. @@ -282,8 +329,10 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: ): return True - if is_proper_subtype(left, right, ignore_promotions=ignore_promotions) or is_proper_subtype( - right, left, ignore_promotions=ignore_promotions + if is_proper_subtype( + left, right, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited + ) or is_proper_subtype( + right, left, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited ): return True @@ -293,7 +342,22 @@ def _is_overlapping_types(left: Type, right: Type) -> bool: left_possible = get_possible_variants(left) right_possible = get_possible_variants(right) - # We start by checking multi-variant types like Unions first. We also perform + # First handle special cases relating to PEP 612: + # - comparing a `Parameters` to a `Parameters` + # - comparing a `Parameters` to a `ParamSpecType` + # - comparing a `ParamSpecType` to a `ParamSpecType` + # + # These should all always be considered overlapping equality checks. + # These need to be done before we move on to other TypeVarLike comparisons. + if isinstance(left, (Parameters, ParamSpecType)) and isinstance( + right, (Parameters, ParamSpecType) + ): + return True + # A `Parameters` does not overlap with anything else, however + if isinstance(left, Parameters) or isinstance(right, Parameters): + return False + + # Now move on to checking multi-variant types like Unions. We also perform # the same logic if either type happens to be a TypeVar/ParamSpec/TypeVarTuple. # # Handling the TypeVarLikes now lets us simulate having them bind to the corresponding @@ -390,18 +454,13 @@ def _type_object_overlap(left: Type, right: Type) -> bool: return _type_object_overlap(left, right) or _type_object_overlap(right, left) if isinstance(left, CallableType) and isinstance(right, CallableType): - - def _callable_overlap(left: CallableType, right: CallableType) -> bool: - return is_callable_compatible( - left, - right, - is_compat=_is_overlapping_types, - ignore_pos_arg_names=True, - allow_partial_overlap=True, - ) - - # Compare both directions to handle type objects. 
- return _callable_overlap(left, right) or _callable_overlap(right, left) + return is_callable_compatible( + left, + right, + is_compat=_is_overlapping_types, + ignore_pos_arg_names=True, + allow_partial_overlap=True, + ) elif isinstance(left, CallableType): left = left.fallback elif isinstance(right, CallableType): @@ -425,11 +484,16 @@ def _callable_overlap(left: CallableType, right: CallableType) -> bool: if isinstance(left, Instance) and isinstance(right, Instance): # First we need to handle promotions and structural compatibility for instances # that came as fallbacks, so simply call is_subtype() to avoid code duplication. - if is_subtype(left, right, ignore_promotions=ignore_promotions) or is_subtype( - right, left, ignore_promotions=ignore_promotions + if is_subtype( + left, right, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited + ) or is_subtype( + right, left, ignore_promotions=ignore_promotions, ignore_uninhabited=ignore_uninhabited ): return True + if right.type.fullname == "builtins.int" and left.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True + # Two unrelated types cannot be partially overlapping: they're disjoint. if left.type.has_base(right.type.fullname): left = map_instance_to_supertype(left, right.type) @@ -467,7 +531,7 @@ def _callable_overlap(left: CallableType, right: CallableType) -> bool: # Note: it's unclear however, whether returning False is the right thing # to do when inferring reachability -- see https://github.com/python/mypy/issues/5529 - assert type(left) != type(right) + assert type(left) != type(right), f"{type(left)} vs {type(right)}" return False @@ -666,10 +730,10 @@ def visit_instance(self, t: Instance) -> ProperType: return NoneType() else: alt_promote = t.type.alt_promote - if alt_promote and alt_promote is self.s.type: + if alt_promote and alt_promote.type is self.s.type: return t alt_promote = self.s.type.alt_promote - if alt_promote and alt_promote is t.type: + if alt_promote and alt_promote.type is t.type: return self.s if is_subtype(t, self.s): return t diff --git a/mypy/memprofile.py b/mypy/memprofile.py index 7c479a6480cc..20e18c3c0bf2 100644 --- a/mypy/memprofile.py +++ b/mypy/memprofile.py @@ -35,7 +35,7 @@ def collect_memory_stats() -> tuple[dict[str, int], dict[str, int]]: if hasattr(obj, "__dict__"): # Keep track of which class a particular __dict__ is associated with. inferred[id(obj.__dict__)] = f"{n} (__dict__)" - if isinstance(obj, (Node, Type)): # type: ignore + if isinstance(obj, (Node, Type)): # type: ignore[misc] if hasattr(obj, "__dict__"): for x in obj.__dict__.values(): if isinstance(x, list): diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 4ddf3c9f1a8c..e00aca2869bd 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -21,13 +21,21 @@ class ErrorMessage(NamedTuple): def format(self, *args: object, **kwargs: object) -> ErrorMessage: return ErrorMessage(self.value.format(*args, **kwargs), code=self.code) + def with_additional_msg(self, info: str) -> ErrorMessage: + return ErrorMessage(self.value + info, code=self.code) + # Invalid types -INVALID_TYPE_RAW_ENUM_VALUE: Final = "Invalid type: try using Literal[{}.{}] instead?" 
+INVALID_TYPE_RAW_ENUM_VALUE: Final = ErrorMessage( + "Invalid type: try using Literal[{}.{}] instead?", codes.VALID_TYPE +) # Type checker error message constants NO_RETURN_VALUE_EXPECTED: Final = ErrorMessage("No return value expected", codes.RETURN_VALUE) MISSING_RETURN_STATEMENT: Final = ErrorMessage("Missing return statement", codes.RETURN) +EMPTY_BODY_ABSTRACT: Final = ErrorMessage( + "If the method is meant to be abstract, use @abc.abstractmethod", codes.EMPTY_BODY +) INVALID_IMPLICIT_RETURN: Final = ErrorMessage("Implicit return in function which does not return") INCOMPATIBLE_RETURN_VALUE_TYPE: Final = ErrorMessage( "Incompatible return value type", codes.RETURN_VALUE @@ -36,6 +44,9 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") INVALID_EXCEPTION_TYPE: Final = ErrorMessage("Exception type must be derived from BaseException") +INVALID_EXCEPTION_GROUP: Final = ErrorMessage( + "Exception type in except* cannot derive from BaseExceptionGroup" +) RETURN_IN_ASYNC_GENERATOR: Final = ErrorMessage( '"return" with value in async generator is not allowed' ) @@ -47,8 +58,10 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: "supertypes" ) YIELD_VALUE_EXPECTED: Final = ErrorMessage("Yield value expected") -INCOMPATIBLE_TYPES: Final = "Incompatible types" -INCOMPATIBLE_TYPES_IN_ASSIGNMENT: Final = "Incompatible types in assignment" +INCOMPATIBLE_TYPES: Final = ErrorMessage("Incompatible types") +INCOMPATIBLE_TYPES_IN_ASSIGNMENT: Final = ErrorMessage( + "Incompatible types in assignment", code=codes.ASSIGNMENT +) INCOMPATIBLE_TYPES_IN_AWAIT: Final = ErrorMessage('Incompatible types in "await"') INCOMPATIBLE_REDEFINITION: Final = ErrorMessage("Incompatible redefinition") INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER: Final = ( @@ -94,7 +107,7 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: FUNCTION_PARAMETER_CANNOT_BE_COVARIANT: Final = ErrorMessage( "Cannot use a covariant type variable as a parameter" ) -INCOMPATIBLE_IMPORT_OF: Final = "Incompatible import of" +INCOMPATIBLE_IMPORT_OF: Final = ErrorMessage('Incompatible import of "{}"', code=codes.ASSIGNMENT) FUNCTION_TYPE_EXPECTED: Final = ErrorMessage( "Function is missing a type annotation", codes.NO_UNTYPED_DEF ) @@ -124,6 +137,7 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: MODULE_LEVEL_GETATTRIBUTE: Final = ErrorMessage( "__getattribute__ is not valid at the module level" ) +CLASS_VAR_CONFLICTS_SLOTS: Final = '"{}" in __slots__ conflicts with class variable access' NAME_NOT_IN_SLOTS: Final = ErrorMessage( 'Trying to assign name "{}" that is not in "__slots__" of type "{}"' ) @@ -138,7 +152,11 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: code=codes.TRUTHY_BOOL, ) FUNCTION_ALWAYS_TRUE: Final = ErrorMessage( - "Function {} could always be true in boolean context", code=codes.TRUTHY_BOOL + "Function {} could always be true in boolean context", code=codes.TRUTHY_FUNCTION +) +ITERABLE_ALWAYS_TRUE: Final = ErrorMessage( + "{} which can always be true in boolean context. 
Consider using {} instead.", + code=codes.TRUTHY_ITERABLE, ) NOT_CALLABLE: Final = "{} not callable" TYPE_MUST_BE_USED: Final = "Value of type {} must be used" @@ -165,7 +183,7 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: TYPEVAR_UNEXPECTED_ARGUMENT: Final = 'Unexpected argument to "TypeVar()"' UNBOUND_TYPEVAR: Final = ( "A function returning TypeVar should receive at least " - "one argument containing the same Typevar" + "one argument containing the same TypeVar" ) # Super @@ -221,6 +239,7 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: "variable" ) CLASS_VAR_WITH_TYPEVARS: Final = "ClassVar cannot contain type variables" +CLASS_VAR_WITH_GENERIC_SELF: Final = "ClassVar cannot contain Self type in generic classes" CLASS_VAR_OUTSIDE_OF_CLASS: Final = "ClassVar can only be used for assignments in class body" # Protocol @@ -251,3 +270,7 @@ def format(self, *args: object, **kwargs: object) -> ErrorMessage: CLASS_PATTERN_UNKNOWN_KEYWORD: Final = 'Class "{}" has no attribute "{}"' MULTIPLE_ASSIGNMENTS_IN_PATTERN: Final = 'Multiple assignments to name "{}" in pattern' CANNOT_MODIFY_MATCH_ARGS: Final = 'Cannot assign to "__match_args__"' + +DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL: Final = ( + '"alias" argument to dataclass field must be a string literal' + ) diff --git a/mypy/messages.py b/mypy/messages.py index d93541e94c9c..ba2508033790 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -12,10 +12,11 @@ from __future__ import annotations import difflib +import itertools import re from contextlib import contextmanager from textwrap import dedent -from typing import Any, Callable, Iterable, Iterator, List, Sequence, cast +from typing import Any, Callable, Collection, Iterable, Iterator, List, Sequence, cast from typing_extensions import Final from mypy import errorcodes as codes, message_registry @@ -54,6 +55,7 @@ IS_CLASS_OR_STATIC, IS_CLASSVAR, IS_SETTABLE, + IS_VAR, find_member, get_member_flags, is_same_type, @@ -79,10 +81,12 @@ TypedDictType, TypeOfAny, TypeType, + TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, + UnpackType, get_proper_type, get_proper_types, ) @@ -119,8 +123,6 @@ # test-data/unit/fixtures/) that provides the definition. This is used for # generating better error messages when running mypy tests only. SUGGESTED_TEST_FIXTURES: Final = { - "builtins.list": "list.pyi", - "builtins.dict": "dict.pyi", "builtins.set": "set.pyi", "builtins.tuple": "tuple.pyi", "builtins.bool": "bool.pyi", @@ -129,6 +131,7 @@ "builtins.isinstance": "isinstancelist.pyi", "builtins.property": "property.pyi", "builtins.classmethod": "classmethod.pyi", + "typing._SpecialForm": "typing-medium.pyi", } @@ -162,7 +165,10 @@ def __init__(self, errors: Errors, modules: dict[str, MypyFile]) -> None: # def filter_errors( - self, *, filter_errors: bool = True, save_filtered_errors: bool = False + self, + *, + filter_errors: bool | Callable[[str, ErrorInfo], bool] = True, + save_filtered_errors: bool = False, ) -> ErrorWatcher: return ErrorWatcher( self.errors, filter_errors=filter_errors, save_filtered_errors=save_filtered_errors @@ -184,6 +190,14 @@ def disable_type_names(self) -> Iterator[None]: def are_type_names_disabled(self) -> bool: return len(self._disable_type_names) > 0 and self._disable_type_names[-1] + def prefer_simple_messages(self) -> bool: + """Should we generate simple/fast error messages? + + If errors aren't shown to the user, we don't want to waste cycles producing + complex error messages.
+ """ + return self.errors.prefer_simple_messages() + def report( self, msg: str, @@ -195,6 +209,7 @@ def report( origin: Context | None = None, offset: int = 0, allow_dups: bool = False, + secondary_context: Context | None = None, ) -> None: """Report an error or note (unless disabled). @@ -202,7 +217,7 @@ def report( where # type: ignore comments have effect. """ - def span_from_context(ctx: Context) -> tuple[int, int]: + def span_from_context(ctx: Context) -> Iterable[int]: """This determines where a type: ignore for a given context has effect. Current logic is a bit tricky, to keep as much backwards compatibility as @@ -210,22 +225,27 @@ def span_from_context(ctx: Context) -> tuple[int, int]: simplify it) when we drop Python 3.7. """ if isinstance(ctx, (ClassDef, FuncDef)): - return ctx.deco_line or ctx.line, ctx.line + return range(ctx.deco_line or ctx.line, ctx.line + 1) elif not isinstance(ctx, Expression): - return ctx.line, ctx.line + return [ctx.line] else: - return ctx.line, ctx.end_line or ctx.line + return range(ctx.line, (ctx.end_line or ctx.line) + 1) - origin_span: tuple[int, int] | None + origin_span: Iterable[int] | None if origin is not None: origin_span = span_from_context(origin) elif context is not None: origin_span = span_from_context(context) else: origin_span = None + + if secondary_context is not None: + assert origin_span is not None + origin_span = itertools.chain(origin_span, span_from_context(secondary_context)) + self.errors.report( - context.get_line() if context else -1, - context.get_column() if context else -1, + context.line if context else -1, + context.column if context else -1, msg, severity=severity, file=file, @@ -245,9 +265,18 @@ def fail( code: ErrorCode | None = None, file: str | None = None, allow_dups: bool = False, + secondary_context: Context | None = None, ) -> None: """Report an error message (unless disabled).""" - self.report(msg, context, "error", code=code, file=file, allow_dups=allow_dups) + self.report( + msg, + context, + "error", + code=code, + file=file, + allow_dups=allow_dups, + secondary_context=secondary_context, + ) def note( self, @@ -259,6 +288,7 @@ def note( allow_dups: bool = False, *, code: ErrorCode | None = None, + secondary_context: Context | None = None, ) -> None: """Report a note (unless disabled).""" self.report( @@ -270,6 +300,7 @@ def note( offset=offset, allow_dups=allow_dups, code=code, + secondary_context=secondary_context, ) def note_multiline( @@ -280,11 +311,20 @@ def note_multiline( offset: int = 0, allow_dups: bool = False, code: ErrorCode | None = None, + *, + secondary_context: Context | None = None, ) -> None: """Report as many notes as lines in the message (unless disabled).""" for msg in messages.splitlines(): self.report( - msg, context, "note", file=file, offset=offset, allow_dups=allow_dups, code=code + msg, + context, + "note", + file=file, + offset=offset, + allow_dups=allow_dups, + code=code, + secondary_context=secondary_context, ) # @@ -403,32 +443,43 @@ def has_no_attr( if not self.are_type_names_disabled(): failed = False if isinstance(original_type, Instance) and original_type.type.names: - alternatives = set(original_type.type.names.keys()) - - if module_symbol_table is not None: - alternatives |= {key for key in module_symbol_table.keys()} - - # in some situations, the member is in the alternatives set - # but since we're in this function, we shouldn't suggest it - if member in alternatives: - alternatives.remove(member) - - matches = [m for m in COMMON_MISTAKES.get(member, []) if m in 
alternatives] - matches.extend(best_matches(member, alternatives)[:3]) - if member == "__aiter__" and matches == ["__iter__"]: - matches = [] # Avoid misleading suggestion - if matches: + if ( + module_symbol_table is not None + and member in module_symbol_table + and not module_symbol_table[member].module_public + ): self.fail( - '{} has no attribute "{}"; maybe {}?{}'.format( - format_type(original_type), - member, - pretty_seq(matches, "or"), - extra, - ), + f"{format_type(original_type, module_names=True)} does not " + f'explicitly export attribute "{member}"', context, code=codes.ATTR_DEFINED, ) failed = True + else: + alternatives = set(original_type.type.names.keys()) + if module_symbol_table is not None: + alternatives |= { + k for k, v in module_symbol_table.items() if v.module_public + } + # Rare but possible, see e.g. testNewAnalyzerCyclicDefinitionCrossModule + alternatives.discard(member) + + matches = [m for m in COMMON_MISTAKES.get(member, []) if m in alternatives] + matches.extend(best_matches(member, alternatives, n=3)) + if member == "__aiter__" and matches == ["__iter__"]: + matches = [] # Avoid misleading suggestion + if matches: + self.fail( + '{} has no attribute "{}"; maybe {}?{}'.format( + format_type(original_type), + member, + pretty_seq(matches, "or"), + extra, + ), + context, + code=codes.ATTR_DEFINED, + ) + failed = True if not failed: self.fail( '{} has no attribute "{}"{}'.format( @@ -578,20 +629,18 @@ def incompatible_argument( ) return codes.INDEX else: - msg = "{} (expression has type {}, target has type {})" arg_type_str, callee_type_str = format_type_distinctly( arg_type, callee.arg_types[n - 1] ) - self.fail( - msg.format( - message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT, - arg_type_str, - callee_type_str, - ), - context, - code=codes.ASSIGNMENT, + info = ( + f" (expression has type {arg_type_str}, " + f"target has type {callee_type_str})" + ) + error_msg = ( + message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT.with_additional_msg(info) ) - return codes.ASSIGNMENT + self.fail(error_msg.value, context, code=error_msg.code) + return error_msg.code target = f"to {name} " @@ -669,64 +718,69 @@ def incompatible_argument( actual_type_str, expected_type_str ) else: - try: - expected_type = callee.arg_types[m - 1] - except IndexError: # Varargs callees - expected_type = callee.arg_types[-1] - arg_type_str, expected_type_str = format_type_distinctly( - arg_type, expected_type, bare=True - ) - if arg_kind == ARG_STAR: - arg_type_str = "*" + arg_type_str - elif arg_kind == ARG_STAR2: - arg_type_str = "**" + arg_type_str - - # For function calls with keyword arguments, display the argument name rather than the - # number. 
- arg_label = str(n) - if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: - arg_name = outer_context.arg_names[n - 1] - if arg_name is not None: - arg_label = f'"{arg_name}"' - if ( - arg_kind == ARG_STAR2 - and isinstance(arg_type, TypedDictType) - and m <= len(callee.arg_names) - and callee.arg_names[m - 1] is not None - and callee.arg_kinds[m - 1] != ARG_STAR2 - ): - arg_name = callee.arg_names[m - 1] - assert arg_name is not None - arg_type_str, expected_type_str = format_type_distinctly( - arg_type.items[arg_name], expected_type, bare=True - ) - arg_label = f'"{arg_name}"' - if isinstance(outer_context, IndexExpr) and isinstance(outer_context.index, StrExpr): - msg = 'Value of "{}" has incompatible type {}; expected {}'.format( - outer_context.index.value, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), - ) + if self.prefer_simple_messages(): + msg = "Argument has incompatible type" else: - msg = "Argument {} {}has incompatible type {}; expected {}".format( - arg_label, - target, - quote_type_string(arg_type_str), - quote_type_string(expected_type_str), + try: + expected_type = callee.arg_types[m - 1] + except IndexError: # Varargs callees + expected_type = callee.arg_types[-1] + arg_type_str, expected_type_str = format_type_distinctly( + arg_type, expected_type, bare=True ) + if arg_kind == ARG_STAR: + arg_type_str = "*" + arg_type_str + elif arg_kind == ARG_STAR2: + arg_type_str = "**" + arg_type_str + + # For function calls with keyword arguments, display the argument name rather + # than the number. + arg_label = str(n) + if isinstance(outer_context, CallExpr) and len(outer_context.arg_names) >= n: + arg_name = outer_context.arg_names[n - 1] + if arg_name is not None: + arg_label = f'"{arg_name}"' + if ( + arg_kind == ARG_STAR2 + and isinstance(arg_type, TypedDictType) + and m <= len(callee.arg_names) + and callee.arg_names[m - 1] is not None + and callee.arg_kinds[m - 1] != ARG_STAR2 + ): + arg_name = callee.arg_names[m - 1] + assert arg_name is not None + arg_type_str, expected_type_str = format_type_distinctly( + arg_type.items[arg_name], expected_type, bare=True + ) + arg_label = f'"{arg_name}"' + if isinstance(outer_context, IndexExpr) and isinstance( + outer_context.index, StrExpr + ): + msg = 'Value of "{}" has incompatible type {}; expected {}'.format( + outer_context.index.value, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + else: + msg = "Argument {} {}has incompatible type {}; expected {}".format( + arg_label, + target, + quote_type_string(arg_type_str), + quote_type_string(expected_type_str), + ) + expected_type = get_proper_type(expected_type) + if isinstance(expected_type, UnionType): + expected_types = list(expected_type.items) + else: + expected_types = [expected_type] + for type in get_proper_types(expected_types): + if isinstance(arg_type, Instance) and isinstance(type, Instance): + notes = append_invariance_notes(notes, arg_type, type) object_type = get_proper_type(object_type) if isinstance(object_type, TypedDictType): code = codes.TYPEDDICT_ITEM else: code = codes.ARG_TYPE - expected_type = get_proper_type(expected_type) - if isinstance(expected_type, UnionType): - expected_types = list(expected_type.items) - else: - expected_types = [expected_type] - for type in get_proper_types(expected_types): - if isinstance(arg_type, Instance) and isinstance(type, Instance): - notes = append_invariance_notes(notes, arg_type, type) self.fail(msg, context, code=code) if notes: for 
note_msg in notes: @@ -740,7 +794,11 @@ def incompatible_argument_note( context: Context, code: ErrorCode | None, ) -> None: - if isinstance(original_caller_type, (Instance, TupleType, TypedDictType)): + if self.prefer_simple_messages(): + return + if isinstance( + original_caller_type, (Instance, TupleType, TypedDictType, TypeType, CallableType) + ): if isinstance(callee_type, Instance) and callee_type.type.is_protocol: self.report_protocol_problems( original_caller_type, callee_type, context, code=code @@ -787,8 +845,8 @@ def maybe_note_concatenate_pos_args( if names: missing_arguments = '"' + '", "'.join(names) + '"' self.note( - f'This may be because "{original_caller_type.name}" has arguments ' - f"named: {missing_arguments}", + f'This is likely because "{original_caller_type.name}" has named arguments: ' + f"{missing_arguments}. Consider marking them positional-only", context, code=code, ) @@ -814,7 +872,9 @@ def invalid_index_type( def too_few_arguments( self, callee: CallableType, context: Context, argument_names: Sequence[str | None] | None ) -> None: - if argument_names is not None: + if self.prefer_simple_messages(): + msg = "Too few arguments" + elif argument_names is not None: num_positional_args = sum(k is None for k in argument_names) arguments_left = callee.arg_names[num_positional_args : callee.min_args] diff = [k for k in arguments_left if k not in argument_names] @@ -838,7 +898,10 @@ def missing_named_argument(self, callee: CallableType, context: Context, name: s self.fail(msg, context, code=codes.CALL_ARG) def too_many_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many arguments" + else: + msg = "Too many arguments" + for_function(callee) self.fail(msg, context, code=codes.CALL_ARG) self.maybe_note_about_special_args(callee, context) @@ -856,11 +919,16 @@ def too_many_arguments_from_typed_dict( self.fail(msg, context) def too_many_positional_arguments(self, callee: CallableType, context: Context) -> None: - msg = "Too many positional arguments" + for_function(callee) + if self.prefer_simple_messages(): + msg = "Too many positional arguments" + else: + msg = "Too many positional arguments" + for_function(callee) self.fail(msg, context) self.maybe_note_about_special_args(callee, context) def maybe_note_about_special_args(self, callee: CallableType, context: Context) -> None: + if self.prefer_simple_messages(): + return # https://github.com/python/mypy/issues/11309 first_arg = callee.def_extras.get("first_arg") if first_arg and first_arg not in {"self", "cls", "mcs"}: @@ -885,11 +953,11 @@ def unexpected_keyword_argument( matching_type_args.append(callee_arg_name) else: not_matching_type_args.append(callee_arg_name) - matches = best_matches(name, matching_type_args) + matches = best_matches(name, matching_type_args, n=3) if not matches: - matches = best_matches(name, not_matching_type_args) + matches = best_matches(name, not_matching_type_args, n=3) if matches: - msg += f"; did you mean {pretty_seq(matches[:3], 'or')}?" + msg += f"; did you mean {pretty_seq(matches, 'or')}?" 
self.fail(msg, context, code=codes.CALL_ARG) module = find_defining_module(self.modules, callee) if module: @@ -1110,6 +1178,7 @@ def argument_incompatible_with_supertype( arg_type_in_supertype: Type, supertype: str, context: Context, + secondary_context: Context, ) -> None: target = self.override_target(name, name_in_supertype, supertype) arg_type_in_supertype_f = format_type_bare(arg_type_in_supertype) @@ -1120,17 +1189,26 @@ def argument_incompatible_with_supertype( ), context, code=codes.OVERRIDE, + secondary_context=secondary_context, + ) + self.note( + "This violates the Liskov substitution principle", + context, + code=codes.OVERRIDE, + secondary_context=secondary_context, ) - self.note("This violates the Liskov substitution principle", context, code=codes.OVERRIDE) self.note( "See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides", context, code=codes.OVERRIDE, + secondary_context=secondary_context, ) if name == "__eq__" and type_name: multiline_msg = self.comparison_method_example_msg(class_name=type_name) - self.note_multiline(multiline_msg, context, code=codes.OVERRIDE) + self.note_multiline( + multiline_msg, context, code=codes.OVERRIDE, secondary_context=secondary_context + ) def comparison_method_example_msg(self, class_name: str) -> str: return dedent( @@ -1210,6 +1288,12 @@ def invalid_keyword_var_arg(self, typ: Type, is_mapping: bool, context: Context) def undefined_in_superclass(self, member: str, context: Context) -> None: self.fail(f'"{member}" undefined in superclass', context) + def variable_may_be_undefined(self, name: str, context: Context) -> None: + self.fail(f'Name "{name}" may be undefined', context, code=codes.POSSIBLY_UNDEFINED) + + def var_used_before_def(self, name: str, context: Context) -> None: + self.fail(f'Name "{name}" is used before definition', context, code=codes.USED_BEFORE_DEF) + def first_argument_for_super_must_be_type(self, actual: Type, context: Context) -> None: actual = get_proper_type(actual) if isinstance(actual, Instance): @@ -1224,6 +1308,14 @@ def first_argument_for_super_must_be_type(self, actual: Type, context: Context) code=codes.ARG_TYPE, ) + def unsafe_super(self, method: str, cls: str, ctx: Context) -> None: + self.fail( + 'Call to abstract method "{}" of "{}" with trivial body' + " via super() is unsafe".format(method, cls), + ctx, + code=codes.SAFE_SUPER, + ) + def too_few_string_formatting_arguments(self, context: Context) -> None: self.fail("Not enough arguments for format string", context, code=codes.STRING_FORMATTING) @@ -1295,8 +1387,17 @@ def incompatible_self_argument( context, ) - def incompatible_conditional_function_def(self, defn: FuncDef) -> None: - self.fail("All conditional function variants must have identical " "signatures", defn) + def incompatible_conditional_function_def( + self, defn: FuncDef, old_type: FunctionLike, new_type: FunctionLike + ) -> None: + self.fail("All conditional function variants must have identical signatures", defn) + if isinstance(old_type, (CallableType, Overloaded)) and isinstance( + new_type, (CallableType, Overloaded) + ): + self.note("Original:", defn) + self.pretty_callable_or_overload(old_type, defn, offset=4) + self.note("Redefinition:", defn) + self.pretty_callable_or_overload(new_type, defn, offset=4) def cannot_instantiate_abstract_class( self, class_name: str, abstract_attributes: dict[str, bool], context: Context @@ -1317,15 +1418,16 @@ def cannot_instantiate_abstract_class( return if len(attrs_with_none) == 1: note = ( - "The following method 
was marked implicitly abstract because it has an empty " - "function body: {}. If it is not meant to be abstract, explicitly return None." + f"{attrs_with_none[0]} is implicitly abstract because it has an empty function " + "body. If it is not meant to be abstract, explicitly `return` or `return None`." ) else: note = ( "The following methods were marked implicitly abstract because they have empty " - "function bodies: {}. If they are not meant to be abstract, explicitly return None." + f"function bodies: {format_string_list(attrs_with_none)}. " + "If they are not meant to be abstract, explicitly `return` or `return None`." ) - self.note(note.format(format_string_list(attrs_with_none)), context, code=codes.ABSTRACT) + self.note(note, context, code=codes.ABSTRACT) def base_class_definitions_incompatible( self, name: str, base1: TypeInfo, base2: TypeInfo, context: Context @@ -1337,7 +1439,7 @@ def base_class_definitions_incompatible( ) def cant_assign_to_method(self, context: Context) -> None: - self.fail(message_registry.CANNOT_ASSIGN_TO_METHOD, context, code=codes.ASSIGNMENT) + self.fail(message_registry.CANNOT_ASSIGN_TO_METHOD, context, code=codes.METHOD_ASSIGN) def cant_assign_to_classvar(self, name: str, context: Context) -> None: self.fail(f'Cannot assign to class variable "{name}" via instance', context) @@ -1520,23 +1622,30 @@ def need_annotation_for_var( ) -> None: hint = "" has_variable_annotations = not python_version or python_version >= (3, 6) + pep604_supported = not python_version or python_version >= (3, 10) + # type to recommend the user adds + recommended_type = None # Only gives hint if it's a variable declaration and the partial type is a builtin type - if ( - python_version - and isinstance(node, Var) - and isinstance(node.type, PartialType) - and node.type.type - and node.type.type.fullname in reverse_builtin_aliases - ): - alias = reverse_builtin_aliases[node.type.type.fullname] - alias = alias.split(".")[-1] + if python_version and isinstance(node, Var) and isinstance(node.type, PartialType): type_dec = "" - if alias == "Dict": - type_dec = f"{type_dec}, {type_dec}" + if not node.type.type: + # partial None + if pep604_supported: + recommended_type = f"{type_dec} | None" + else: + recommended_type = f"Optional[{type_dec}]" + elif node.type.type.fullname in reverse_builtin_aliases: + # partial types other than partial None + alias = reverse_builtin_aliases[node.type.type.fullname] + alias = alias.split(".")[-1] + if alias == "Dict": + type_dec = f"{type_dec}, {type_dec}" + recommended_type = f"{alias}[{type_dec}]" + if recommended_type is not None: if has_variable_annotations: - hint = f' (hint: "{node.name}: {alias}[{type_dec}] = ...")' + hint = f' (hint: "{node.name}: {recommended_type} = ...")' else: - hint = f' (hint: "{node.name} = ... # type: {alias}[{type_dec}]")' + hint = f' (hint: "{node.name} = ... # type: {recommended_type}")' if has_variable_annotations: needed = "annotation" @@ -1563,9 +1672,9 @@ def unexpected_typeddict_keys( expected_set = set(expected_keys) if not typ.is_anonymous(): # Generate simpler messages for some common special cases. - if actual_set < expected_set: - # Use list comprehension instead of set operations to preserve order. - missing = [key for key in expected_keys if key not in actual_set] + # Use list comprehension instead of set operations to preserve order. 
+ missing = [key for key in expected_keys if key not in actual_set] + if missing: self.fail( "Missing {} for TypedDict {}".format( format_key_list(missing, short=True), format_type(typ) @@ -1573,20 +1682,18 @@ def unexpected_typeddict_keys( context, code=codes.TYPEDDICT_ITEM, ) + extra = [key for key in actual_keys if key not in expected_set] + if extra: + self.fail( + "Extra {} for TypedDict {}".format( + format_key_list(extra, short=True), format_type(typ) + ), + context, + code=codes.TYPEDDICT_UNKNOWN_KEY, + ) + if missing or extra: + # No need to check for further errors return - else: - extra = [key for key in actual_keys if key not in expected_set] - if extra: - # If there are both extra and missing keys, only report extra ones for - # simplicity. - self.fail( - "Extra {} for TypedDict {}".format( - format_key_list(extra, short=True), format_type(typ) - ), - context, - code=codes.TYPEDDICT_ITEM, - ) - return found = format_key_list(actual_keys, short=True) if not expected_keys: self.fail(f"Unexpected TypedDict {found}", context) @@ -1606,8 +1713,15 @@ def typeddict_key_must_be_string_literal(self, typ: TypedDictType, context: Cont ) def typeddict_key_not_found( - self, typ: TypedDictType, item_name: str, context: Context + self, typ: TypedDictType, item_name: str, context: Context, setitem: bool = False ) -> None: + """Handle error messages for TypedDicts that have unknown keys. + + Note, that we differentiate in between reading a value and setting a + value. + Setting a value on a TypedDict is an 'unknown-key' error, whereas + reading it is the more serious/general 'item' error. + """ if typ.is_anonymous(): self.fail( '"{}" is not a valid TypedDict key; expected one of {}'.format( @@ -1616,22 +1730,21 @@ def typeddict_key_not_found( context, ) else: + err_code = codes.TYPEDDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM self.fail( - f'TypedDict {format_type(typ)} has no key "{item_name}"', - context, - code=codes.TYPEDDICT_ITEM, + f'TypedDict {format_type(typ)} has no key "{item_name}"', context, code=err_code ) - matches = best_matches(item_name, typ.items.keys()) + matches = best_matches(item_name, typ.items.keys(), n=3) if matches: self.note( - "Did you mean {}?".format(pretty_seq(matches[:3], "or")), - context, - code=codes.TYPEDDICT_ITEM, + "Did you mean {}?".format(pretty_seq(matches, "or")), context, code=err_code ) def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: formatted_types = ", ".join(list(format_type_distinctly(*types))) - self.fail(f"Type of TypedDict is ambiguous, could be any of ({formatted_types})", context) + self.fail( + f"Type of TypedDict is ambiguous, none of ({formatted_types}) matches cleanly", context + ) def typeddict_key_cannot_be_deleted( self, typ: TypedDictType, item_name: str, context: Context @@ -1711,12 +1824,16 @@ def bad_proto_variance( def concrete_only_assign(self, typ: Type, context: Context) -> None: self.fail( - f"Can only assign concrete classes to a variable of type {format_type(typ)}", context + f"Can only assign concrete classes to a variable of type {format_type(typ)}", + context, + code=codes.TYPE_ABSTRACT, ) def concrete_only_call(self, typ: Type, context: Context) -> None: self.fail( - f"Only concrete class can be given where {format_type(typ)} is expected", context + f"Only concrete class can be given where {format_type(typ)} is expected", + context, + code=codes.TYPE_ABSTRACT, ) def cannot_use_function_with_type( @@ -1788,7 +1905,7 @@ def impossible_intersection( def 
report_protocol_problems( self, - subtype: Instance | TupleType | TypedDictType, + subtype: Instance | TupleType | TypedDictType | TypeType | CallableType, supertype: Instance, context: Context, *, @@ -1808,15 +1925,17 @@ def report_protocol_problems( exclusions: dict[type, list[str]] = { TypedDictType: ["typing.Mapping"], TupleType: ["typing.Iterable", "typing.Sequence"], - Instance: [], } - if supertype.type.fullname in exclusions[type(subtype)]: + if supertype.type.fullname in exclusions.get(type(subtype), []): return if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)): # We don't want to add notes for failed inference (e.g. Iterable[]). # This will be only confusing a user even more. return + class_obj = False + is_module = False + skip = [] if isinstance(subtype, TupleType): if not isinstance(subtype.partial_fallback, Instance): return @@ -1825,38 +1944,63 @@ def report_protocol_problems( if not isinstance(subtype.fallback, Instance): return subtype = subtype.fallback + elif isinstance(subtype, TypeType): + if not isinstance(subtype.item, Instance): + return + class_obj = True + subtype = subtype.item + elif isinstance(subtype, CallableType): + if subtype.is_type_obj(): + ret_type = get_proper_type(subtype.ret_type) + if isinstance(ret_type, TupleType): + ret_type = ret_type.partial_fallback + if not isinstance(ret_type, Instance): + return + class_obj = True + subtype = ret_type + else: + subtype = subtype.fallback + skip = ["__call__"] + if subtype.extra_attrs and subtype.extra_attrs.mod_name: + is_module = True # Report missing members - missing = get_missing_protocol_members(subtype, supertype) + missing = get_missing_protocol_members(subtype, supertype, skip=skip) if ( missing - and len(missing) < len(supertype.type.protocol_members) + and (len(missing) < len(supertype.type.protocol_members) or missing == ["__call__"]) and len(missing) <= MAX_ITEMS ): - self.note( - '"{}" is missing following "{}" protocol member{}:'.format( - subtype.type.name, supertype.type.name, plural_s(missing) - ), - context, - code=code, - ) - self.note(", ".join(missing), context, offset=OFFSET, code=code) + if missing == ["__call__"] and class_obj: + self.note( + '"{}" has constructor incompatible with "__call__" of "{}"'.format( + subtype.type.name, supertype.type.name + ), + context, + code=code, + ) + else: + self.note( + '"{}" is missing following "{}" protocol member{}:'.format( + subtype.type.name, supertype.type.name, plural_s(missing) + ), + context, + code=code, + ) + self.note(", ".join(missing), context, offset=OFFSET, code=code) elif len(missing) > MAX_ITEMS or len(missing) == len(supertype.type.protocol_members): # This is an obviously wrong type: too many missing members return # Report member type conflicts - conflict_types = get_conflict_protocol_types(subtype, supertype) + conflict_types = get_conflict_protocol_types(subtype, supertype, class_obj=class_obj) if conflict_types and ( not is_subtype(subtype, erase_type(supertype)) or not subtype.type.defn.type_vars or not supertype.type.defn.type_vars ): - self.note( - f"Following member(s) of {format_type(subtype)} have conflicts:", - context, - code=code, - ) + type_name = format_type(subtype, module_names=True) + self.note(f"Following member(s) of {type_name} have conflicts:", context, code=code) for name, got, exp in conflict_types[:MAX_ITEMS]: exp = get_proper_type(exp) got = get_proper_type(got) @@ -1872,29 +2016,43 @@ def report_protocol_problems( else: self.note("Expected:", context, offset=OFFSET, 
code=code) if isinstance(exp, CallableType): - self.note(pretty_callable(exp), context, offset=2 * OFFSET, code=code) + self.note( + pretty_callable(exp, skip_self=class_obj or is_module), + context, + offset=2 * OFFSET, + code=code, + ) else: assert isinstance(exp, Overloaded) - self.pretty_overload(exp, context, 2 * OFFSET, code=code) + self.pretty_overload( + exp, context, 2 * OFFSET, code=code, skip_self=class_obj or is_module + ) self.note("Got:", context, offset=OFFSET, code=code) if isinstance(got, CallableType): - self.note(pretty_callable(got), context, offset=2 * OFFSET, code=code) + self.note( + pretty_callable(got, skip_self=class_obj or is_module), + context, + offset=2 * OFFSET, + code=code, + ) else: assert isinstance(got, Overloaded) - self.pretty_overload(got, context, 2 * OFFSET, code=code) + self.pretty_overload( + got, context, 2 * OFFSET, code=code, skip_self=class_obj or is_module + ) self.print_more(conflict_types, context, OFFSET, MAX_ITEMS, code=code) # Report flag conflicts (i.e. settable vs read-only etc.) - conflict_flags = get_bad_protocol_flags(subtype, supertype) + conflict_flags = get_bad_protocol_flags(subtype, supertype, class_obj=class_obj) for name, subflags, superflags in conflict_flags[:MAX_ITEMS]: - if IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags: + if not class_obj and IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags: self.note( "Protocol member {}.{} expected instance variable," " got class variable".format(supertype.type.name, name), context, code=code, ) - if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + if not class_obj and IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: self.note( "Protocol member {}.{} expected class variable," " got instance variable".format(supertype.type.name, name), @@ -1916,6 +2074,25 @@ def report_protocol_problems( context, code=code, ) + if ( + class_obj + and IS_VAR in superflags + and (IS_VAR in subflags and IS_CLASSVAR not in subflags) + ): + self.note( + "Only class variables allowed for class object access on protocols," + ' {} is an instance variable of "{}"'.format(name, subtype.type.name), + context, + code=code, + ) + if class_obj and IS_CLASSVAR in superflags: + self.note( + "ClassVar protocol member {}.{} can never be matched by a class object".format( + supertype.type.name, name + ), + context, + code=code, + ) self.print_more(conflict_flags, context, OFFSET, MAX_ITEMS, code=code) def pretty_overload( @@ -1927,6 +2104,7 @@ def pretty_overload( add_class_or_static_decorator: bool = False, allow_dups: bool = False, code: ErrorCode | None = None, + skip_self: bool = False, ) -> None: for item in tp.items: self.note("@overload", context, offset=offset, allow_dups=allow_dups, code=code) @@ -1937,7 +2115,11 @@ def pretty_overload( self.note(decorator, context, offset=offset, allow_dups=allow_dups, code=code) self.note( - pretty_callable(item), context, offset=offset, allow_dups=allow_dups, code=code + pretty_callable(item, skip_self=skip_self), + context, + offset=offset, + allow_dups=allow_dups, + code=code, ) def print_more( @@ -1962,10 +2144,9 @@ def try_report_long_tuple_assignment_error( subtype: ProperType, supertype: ProperType, context: Context, - msg: str = message_registry.INCOMPATIBLE_TYPES, + msg: message_registry.ErrorMessage, subtype_label: str | None = None, supertype_label: str | None = None, - code: ErrorCode | None = None, ) -> bool: """Try to generate meaningful error message for very long tuple assignment @@ -1980,26 +2161,25 @@ def 
try_report_long_tuple_assignment_error( ): lhs_type = supertype.args[0] lhs_types = [lhs_type] * len(subtype.items) - self.generate_incompatible_tuple_error( - lhs_types, subtype.items, context, msg, code - ) + self.generate_incompatible_tuple_error(lhs_types, subtype.items, context, msg) return True elif isinstance(supertype, TupleType) and ( len(subtype.items) > 10 or len(supertype.items) > 10 ): if len(subtype.items) != len(supertype.items): if supertype_label is not None and subtype_label is not None: - error_msg = "{} ({} {}, {} {})".format( - msg, - subtype_label, - self.format_long_tuple_type(subtype), - supertype_label, - self.format_long_tuple_type(supertype), + msg = msg.with_additional_msg( + " ({} {}, {} {})".format( + subtype_label, + self.format_long_tuple_type(subtype), + supertype_label, + self.format_long_tuple_type(supertype), + ) ) - self.fail(error_msg, context, code=code) + self.fail(msg.value, context, code=msg.code) return True self.generate_incompatible_tuple_error( - supertype.items, subtype.items, context, msg, code + supertype.items, subtype.items, context, msg ) return True return False @@ -2019,8 +2199,7 @@ def generate_incompatible_tuple_error( lhs_types: list[Type], rhs_types: list[Type], context: Context, - msg: str = message_registry.INCOMPATIBLE_TYPES, - code: ErrorCode | None = None, + msg: message_registry.ErrorMessage, ) -> None: """Generate error message for individual incompatible tuple pairs""" error_cnt = 0 @@ -2035,14 +2214,15 @@ def generate_incompatible_tuple_error( ) error_cnt += 1 - error_msg = msg + f" ({str(error_cnt)} tuple items are incompatible" + info = f" ({str(error_cnt)} tuple items are incompatible" if error_cnt - 3 > 0: - error_msg += f"; {str(error_cnt - 3)} items are omitted)" + info += f"; {str(error_cnt - 3)} items are omitted)" else: - error_msg += ")" - self.fail(error_msg, context, code=code) + info += ")" + msg = msg.with_additional_msg(info) + self.fail(msg.value, context, code=msg.code) for note in notes: - self.note(note, context, code=code) + self.note(note, context, code=msg.code) def add_fixture_note(self, fullname: str, ctx: Context) -> None: self.note(f'Maybe your test fixture does not define "{fullname}"?', ctx) @@ -2054,12 +2234,21 @@ def add_fixture_note(self, fullname: str, ctx: Context) -> None: ctx, ) + def annotation_in_unchecked_function(self, context: Context) -> None: + self.note( + "By default the bodies of untyped functions are not checked," + " consider using --check-untyped-defs", + context, + code=codes.ANNOTATION_UNCHECKED, + ) + def quote_type_string(type_string: str) -> str: """Quotes a type representation for use in messages.""" no_quote_regex = r"^<(tuple|union): \d+ items>$" if ( type_string in ["Module", "overloaded function", "", ""] + or type_string.startswith("Module ") or re.match(no_quote_regex, type_string) is not None or type_string.endswith("?") ): @@ -2092,7 +2281,9 @@ def format_callable_args( return ", ".join(arg_strings) -def format_type_inner(typ: Type, verbosity: int, fullnames: set[str] | None) -> str: +def format_type_inner( + typ: Type, verbosity: int, fullnames: set[str] | None, module_names: bool = False +) -> str: """ Convert a type to a relatively short string suitable for error messages. @@ -2132,7 +2323,13 @@ def format_literal_value(typ: LiteralType) -> str: # Get the short name of the type. if itype.type.fullname in ("types.ModuleType", "_importlib_modulespec.ModuleType"): # Make some common error messages simpler and tidier. 
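The `annotation_in_unchecked_function` note defined above fires when an annotation appears inside a function whose body mypy is otherwise skipping. A minimal assumed example (not from this diff):

```python
# Without --check-untyped-defs the body of an unannotated function is not checked,
# so instead of an error mypy only emits the new note on the annotated assignment.

def untyped(x):  # no parameter or return annotations
    y: int = x   # note: By default the bodies of untyped functions are not checked,
                 # consider using --check-untyped-defs  [annotation-unchecked]
    return y
```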
- return "Module" + base_str = "Module" + if itype.extra_attrs and itype.extra_attrs.mod_name and module_names: + return f'{base_str} "{itype.extra_attrs.mod_name}"' + return base_str + if itype.type.fullname == "typing._SpecialForm": + # This is not a real type but used for some typing-related constructs. + return "" if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): base_str = itype.type.fullname else: @@ -2150,9 +2347,14 @@ def format_literal_value(typ: LiteralType) -> str: else: # There are type arguments. Convert the arguments to strings. return f"{base_str}[{format_list(itype.args)}]" + elif isinstance(typ, UnpackType): + return f"Unpack[{format(typ.type)}]" elif isinstance(typ, TypeVarType): # This is similar to non-generic instance types. return typ.name + elif isinstance(typ, TypeVarTupleType): + # This is similar to non-generic instance types. + return typ.name elif isinstance(typ, ParamSpecType): # Concatenate[..., P] if typ.prefix.arg_types: @@ -2306,7 +2508,7 @@ def find_type_overlaps(*types: Type) -> set[str]: return overlaps -def format_type(typ: Type, verbosity: int = 0) -> str: +def format_type(typ: Type, verbosity: int = 0, module_names: bool = False) -> str: """ Convert a type to a relatively short string suitable for error messages. @@ -2317,10 +2519,10 @@ def format_type(typ: Type, verbosity: int = 0) -> str: modification of the formatted string is required, callers should use format_type_bare. """ - return quote_type_string(format_type_bare(typ, verbosity)) + return quote_type_string(format_type_bare(typ, verbosity, module_names)) -def format_type_bare(typ: Type, verbosity: int = 0) -> str: +def format_type_bare(typ: Type, verbosity: int = 0, module_names: bool = False) -> str: """ Convert a type to a relatively short string suitable for error messages. @@ -2332,7 +2534,7 @@ def format_type_bare(typ: Type, verbosity: int = 0) -> str: instead. (The caller may want to use quote_type_string after processing has happened, to maintain consistent quoting in messages.) """ - return format_type_inner(typ, verbosity, find_type_overlaps(typ)) + return format_type_inner(typ, verbosity, find_type_overlaps(typ), module_names) def format_type_distinctly(*types: Type, bare: bool = False) -> tuple[str, ...]: @@ -2370,13 +2572,18 @@ def pretty_class_or_static_decorator(tp: CallableType) -> str | None: return None -def pretty_callable(tp: CallableType) -> str: +def pretty_callable(tp: CallableType, skip_self: bool = False) -> str: """Return a nice easily-readable representation of a callable type. For example: def [T <: int] f(self, x: int, y: T) -> None + + If skip_self is True, print an actual callable type, as it would appear + when bound on an instance/class, rather than how it would appear in the + defining statement. """ s = "" asterisk = False + slash = False for i in range(len(tp.arg_types)): if s: s += ", " @@ -2391,26 +2598,35 @@ def [T <: int] f(self, x: int, y: T) -> None name = tp.arg_names[i] if name: s += name + ": " - s += format_type_bare(tp.arg_types[i]) + type_str = format_type_bare(tp.arg_types[i]) + if tp.arg_kinds[i] == ARG_STAR2 and tp.unpack_kwargs: + type_str = f"Unpack[{type_str}]" + s += type_str if tp.arg_kinds[i].is_optional(): s += " = ..." 
+ if ( + not slash + and tp.arg_kinds[i].is_positional() + and name is None + and ( + i == len(tp.arg_types) - 1 + or (tp.arg_names[i + 1] is not None or not tp.arg_kinds[i + 1].is_positional()) + ) + ): + s += ", /" + slash = True # If we got a "special arg" (i.e: self, cls, etc...), prepend it to the arg list - if ( - isinstance(tp.definition, FuncDef) - and tp.definition.name is not None - and hasattr(tp.definition, "arguments") - ): - definition_args = [arg.variable.name for arg in tp.definition.arguments] + if isinstance(tp.definition, FuncDef) and hasattr(tp.definition, "arguments"): + definition_arg_names = [arg.variable.name for arg in tp.definition.arguments] if ( - definition_args - and tp.arg_names != definition_args - and len(definition_args) > 0 - and definition_args[0] + len(definition_arg_names) > len(tp.arg_names) + and definition_arg_names[0] + and not skip_self ): if s: s = ", " + s - s = definition_args[0] + s + s = definition_arg_names[0] + s s = f"{tp.definition.name}({s})" elif tp.name: first_arg = tp.def_extras.get("first_arg") @@ -2462,19 +2678,23 @@ def variance_string(variance: int) -> str: return "invariant" -def get_missing_protocol_members(left: Instance, right: Instance) -> list[str]: +def get_missing_protocol_members(left: Instance, right: Instance, skip: list[str]) -> list[str]: """Find all protocol members of 'right' that are not implemented (i.e. completely missing) in 'left'. """ assert right.type.is_protocol missing: list[str] = [] for member in right.type.protocol_members: + if member in skip: + continue if not find_member(member, left, left): missing.append(member) return missing -def get_conflict_protocol_types(left: Instance, right: Instance) -> list[tuple[str, Type, Type]]: +def get_conflict_protocol_types( + left: Instance, right: Instance, class_obj: bool = False +) -> list[tuple[str, Type, Type]]: """Find members that are defined in 'left' but have incompatible types. Return them as a list of ('member', 'got', 'expected'). """ @@ -2485,11 +2705,11 @@ def get_conflict_protocol_types(left: Instance, right: Instance) -> list[tuple[s continue supertype = find_member(member, right, left) assert supertype is not None - subtype = find_member(member, left, left) + subtype = find_member(member, left, left, class_obj=class_obj) if not subtype: continue is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True) - if IS_SETTABLE in get_member_flags(member, right.type): + if IS_SETTABLE in get_member_flags(member, right): is_compat = is_compat and is_subtype(supertype, subtype) if not is_compat: conflicts.append((member, subtype, supertype)) @@ -2497,7 +2717,7 @@ def get_conflict_protocol_types(left: Instance, right: Instance) -> list[tuple[s def get_bad_protocol_flags( - left: Instance, right: Instance + left: Instance, right: Instance, class_obj: bool = False ) -> list[tuple[str, set[int], set[int]]]: """Return all incompatible attribute flags for members that are present in both 'left' and 'right'. 
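The `class_obj` flag threaded through `report_protocol_problems`, `get_conflict_protocol_types` and `get_bad_protocol_flags` covers matching a class object, rather than an instance, against a protocol. A hedged sketch of the situation the new notes describe (names are invented for illustration):

```python
from typing import Protocol


class HasValue(Protocol):
    value: int


class Impl:
    def __init__(self) -> None:
        self.value = 1  # instance variable, so it only exists on instances


def use(obj: HasValue) -> int:
    return obj.value


use(Impl())  # OK: the instance provides "value"
use(Impl)    # rejected: the notes explain that only class variables can satisfy
             # protocol members when a class object is matched against a protocol
```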
@@ -2506,23 +2726,25 @@ def get_bad_protocol_flags( all_flags: list[tuple[str, set[int], set[int]]] = [] for member in right.type.protocol_members: if find_member(member, left, left): - item = ( - member, - get_member_flags(member, left.type), - get_member_flags(member, right.type), - ) + item = (member, get_member_flags(member, left), get_member_flags(member, right)) all_flags.append(item) bad_flags = [] for name, subflags, superflags in all_flags: if ( IS_CLASSVAR in subflags and IS_CLASSVAR not in superflags + and IS_SETTABLE in superflags or IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags or IS_SETTABLE in superflags and IS_SETTABLE not in subflags or IS_CLASS_OR_STATIC in superflags and IS_CLASS_OR_STATIC not in subflags + or class_obj + and IS_VAR in superflags + and IS_CLASSVAR not in subflags + or class_obj + and IS_CLASSVAR in superflags ): bad_flags.append((name, subflags, superflags)) return bad_flags @@ -2588,11 +2810,22 @@ def for_function(callee: CallableType) -> str: return "" +def wrong_type_arg_count(n: int, act: str, name: str) -> str: + s = f"{n} type arguments" + if n == 0: + s = "no type arguments" + elif n == 1: + s = "1 type argument" + if act == "0": + act = "none" + return f'"{name}" expects {s}, but {act} given' + + def find_defining_module(modules: dict[str, MypyFile], typ: CallableType) -> MypyFile | None: if not typ.definition: return None fullname = typ.definition.fullname - if fullname is not None and "." in fullname: + if "." in fullname: for i in range(fullname.count(".")): module_name = fullname.rsplit(".", i + 1)[0] try: @@ -2607,11 +2840,24 @@ def find_defining_module(modules: dict[str, MypyFile], typ: CallableType) -> Myp COMMON_MISTAKES: Final[dict[str, Sequence[str]]] = {"add": ("append", "extend")} -def best_matches(current: str, options: Iterable[str]) -> list[str]: - ratios = {v: difflib.SequenceMatcher(a=current, b=v).ratio() for v in options} - return sorted( - (o for o in options if ratios[o] > 0.75), reverse=True, key=lambda v: (ratios[v], v) - ) +def _real_quick_ratio(a: str, b: str) -> float: + # this is an upper bound on difflib.SequenceMatcher.ratio + # similar to difflib.SequenceMatcher.real_quick_ratio, but faster since we don't instantiate + al = len(a) + bl = len(b) + return 2.0 * min(al, bl) / (al + bl) + + +def best_matches(current: str, options: Collection[str], n: int) -> list[str]: + # narrow down options cheaply + assert current + options = [o for o in options if _real_quick_ratio(current, o) > 0.75] + if len(options) >= 50: + options = [o for o in options if abs(len(o) - len(current)) <= 1] + + ratios = {option: difflib.SequenceMatcher(a=current, b=option).ratio() for option in options} + options = [option for option, ratio in ratios.items() if ratio > 0.75] + return sorted(options, key=lambda v: (-ratios[v], v))[:n] def pretty_seq(args: Sequence[str], conjunction: str) -> str: diff --git a/mypy/metastore.py b/mypy/metastore.py index 8a8a3088ca76..16cbd5adc9c8 100644 --- a/mypy/metastore.py +++ b/mypy/metastore.py @@ -185,10 +185,14 @@ def _query(self, name: str, field: str) -> Any: return results[0][0] def getmtime(self, name: str) -> float: - return self._query(name, "mtime") + mtime = self._query(name, "mtime") + assert isinstance(mtime, float) + return mtime def read(self, name: str) -> str: - return self._query(name, "data") + data = self._query(name, "data") + assert isinstance(data, str) + return data def write(self, name: str, data: str, mtime: float | None = None) -> bool: import sqlite3 diff --git 
a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py index d25e9b9b0137..771f87fc6bd6 100644 --- a/mypy/mixedtraverser.py +++ b/mypy/mixedtraverser.py @@ -25,6 +25,9 @@ class MixedTraverserVisitor(TraverserVisitor, TypeTraverserVisitor): """Recursive traversal of both Node and Type objects.""" + def __init__(self) -> None: + self.in_type_alias_expr = False + # Symbol nodes def visit_var(self, var: Var) -> None: @@ -45,7 +48,9 @@ def visit_class_def(self, o: ClassDef) -> None: def visit_type_alias_expr(self, o: TypeAliasExpr) -> None: super().visit_type_alias_expr(o) + self.in_type_alias_expr = True o.type.accept(self) + self.in_type_alias_expr = False def visit_type_var_expr(self, o: TypeVarExpr) -> None: super().visit_type_var_expr(o) diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index aaa8216ae435..265d76ed5bb6 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -28,7 +28,7 @@ from mypy.fscache import FileSystemCache from mypy.nodes import MypyFile from mypy.options import Options -from mypy.stubinfo import is_legacy_bundled_package +from mypy.stubinfo import approved_stub_package_exists # Paths to be searched in find_module(). @@ -89,9 +89,7 @@ def error_message_templates(self, daemon: bool) -> tuple[str, list[str]]: ) notes = [doc_link] elif self is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: - msg = ( - 'Library stubs not installed for "{module}" (or incompatible with Python {pyver})' - ) + msg = 'Library stubs not installed for "{module}"' notes = ['Hint: "python3 -m pip install {stub_dist}"'] if not daemon: notes.append( @@ -117,15 +115,19 @@ def __init__( module: str | None, text: str | None = None, base_dir: str | None = None, + followed: bool = False, ) -> None: self.path = path # File where it's found (e.g. 'xxx/yyy/foo/bar.py') self.module = module or "__main__" # Module name (e.g. 'foo.bar') self.text = text # Source code, if initially supplied, else None self.base_dir = base_dir # Directory where the package is rooted (e.g. 'xxx/yyy') + self.followed = followed # Was this found by following imports? 
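For reference, a usage sketch of the widened `BuildSource` constructor, based only on the signature shown in this diff:

```python
from mypy.modulefinder import BuildSource

# "followed" records that the source was discovered by following imports
# rather than being passed explicitly on the command line.
src = BuildSource(path="pkg/mod.py", module="pkg.mod", followed=True)
print(repr(src))  # repr now also includes the followed flag
```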
def __repr__(self) -> str: - return "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r})".format( - self.path, self.module, self.text is not None, self.base_dir + return ( + "BuildSource(path={!r}, module={!r}, has_text={}, base_dir={!r}, followed={})".format( + self.path, self.module, self.text is not None, self.base_dir, self.followed + ) ) @@ -146,14 +148,11 @@ def __init__(self, sources: list[BuildSource]) -> None: self.source_modules[source.module] = source.path or "" def is_source(self, file: MypyFile) -> bool: - if file.path and file.path in self.source_paths: - return True - elif file._fullname in self.source_modules: - return True - elif self.source_text_present: - return True - else: - return False + return ( + (file.path and file.path in self.source_paths) + or file._fullname in self.source_modules + or self.source_text_present + ) class FindModuleCache: @@ -338,13 +337,13 @@ def _find_module_non_stub_helper( # If this is not a directory then we can't traverse further into it if not self.fscache.isdir(dir_path): break - if is_legacy_bundled_package(components[0]): + if approved_stub_package_exists(components[0]): if len(components) == 1 or ( self.find_module(components[0]) is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED ): return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED - if is_legacy_bundled_package(".".join(components[:2])): + if approved_stub_package_exists(".".join(components[:2])): return ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED if plausible_match: return ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS @@ -508,7 +507,11 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # In namespace mode, register a potential namespace package if self.options and self.options.namespace_packages: - if fscache.exists_case(base_path, dir_prefix) and not has_init: + if ( + not has_init + and fscache.exists_case(base_path, dir_prefix) + and not fscache.isfile_case(base_path, dir_prefix) + ): near_misses.append((base_path, dir_prefix)) # No package, look for module. @@ -571,11 +574,11 @@ def _is_compatible_stub_package(self, stub_dir: str) -> bool: whether the stubs are compatible with Python 2 and 3. """ metadata_fnam = os.path.join(stub_dir, "METADATA.toml") - if os.path.isfile(metadata_fnam): - with open(metadata_fnam, "rb") as f: - metadata = tomllib.load(f) - return bool(metadata.get("python3", True)) - return True + if not os.path.isfile(metadata_fnam): + return True + with open(metadata_fnam, "rb") as f: + metadata = tomllib.load(f) + return bool(metadata.get("python3", True)) def find_modules_recursive(self, module: str) -> list[BuildSource]: module_path = self.find_module(module) diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index feca1f43abf2..b383fc9dc145 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -36,7 +36,7 @@ def is_c_module(module: ModuleType) -> bool: # Could be a namespace package. These must be handled through # introspection, since there is no source file. 
return True - return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd"] + return os.path.splitext(module.__dict__["__file__"])[-1] in [".so", ".pyd", ".dll"] class InspectError(Exception): diff --git a/mypy/mro.py b/mypy/mro.py index 912cf3e2e341..cc9f88a9d045 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -4,7 +4,7 @@ from mypy.nodes import TypeInfo from mypy.types import Instance -from mypy.typestate import TypeState +from mypy.typestate import type_state def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None) -> None: @@ -17,7 +17,7 @@ def calculate_mro(info: TypeInfo, obj_type: Callable[[], Instance] | None = None info.mro = mro # The property of falling back to Any is inherited. info.fallback_to_any = any(baseinfo.fallback_to_any for baseinfo in info.mro) - TypeState.reset_all_subtype_caches_for(info) + type_state.reset_all_subtype_caches_for(info) class MroError(Exception): diff --git a/mypy/nodes.py b/mypy/nodes.py index 765feb171b9b..9247d391bc96 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -10,9 +10,9 @@ TYPE_CHECKING, Any, Callable, - DefaultDict, Dict, Iterator, + List, Optional, Sequence, Tuple, @@ -25,7 +25,6 @@ from mypy_extensions import trait import mypy.strconv -from mypy.bogus_type import Bogus from mypy.util import short_type from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor @@ -72,14 +71,6 @@ def set_line( if end_column is not None: self.end_column = end_column - def get_line(self) -> int: - """Don't use. Use x.line.""" - return self.line - - def get_column(self) -> int: - """Don't use. Use x.column.""" - return self.column - if TYPE_CHECKING: # break import cycle only needed for mypy @@ -255,12 +246,10 @@ class SymbolNode(Node): def name(self) -> str: pass - # fullname can often be None even though the type system - # disagrees. We mark this with Bogus to let mypyc know not to - # worry about it. + # Fully qualified name @property @abstractmethod - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: pass @abstractmethod @@ -299,14 +288,16 @@ class MypyFile(SymbolNode): "future_import_flags", ) + __match_args__ = ("name", "path", "defs") + # Fully qualified module name - _fullname: Bogus[str] + _fullname: str # Path to the file (empty string if not known) path: str # Top-level definitions and statements defs: list[Statement] # Type alias dependencies as mapping from target to set of alias full names - alias_deps: DefaultDict[str, set[str]] + alias_deps: defaultdict[str, set[str]] # Is there a UTF-8 BOM at the start? 
is_bom: bool names: SymbolTable @@ -367,7 +358,7 @@ def name(self) -> str: return "" if not self._fullname else self._fullname.split(".")[-1] @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -434,6 +425,8 @@ class Import(ImportBase): __slots__ = ("ids",) + __match_args__ = ("ids",) + ids: list[tuple[str, str | None]] # (module id, as id) def __init__(self, ids: list[tuple[str, str | None]]) -> None: @@ -449,6 +442,8 @@ class ImportFrom(ImportBase): __slots__ = ("id", "names", "relative") + __match_args__ = ("id", "names", "relative") + id: str relative: int names: list[tuple[str, str | None]] # Tuples (name, as name) @@ -466,60 +461,22 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ImportAll(ImportBase): """from m import *""" - __slots__ = ("id", "relative", "imported_names") + __slots__ = ("id", "relative") + + __match_args__ = ("id", "relative") id: str relative: int - # NOTE: Only filled and used by old semantic analyzer. - imported_names: list[str] def __init__(self, id: str, relative: int) -> None: super().__init__() self.id = id self.relative = relative - self.imported_names = [] def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_import_all(self) -class ImportedName(SymbolNode): - """Indirect reference to a fullname stored in symbol table. - - This node is not present in the original program as such. This is - just a temporary artifact in binding imported names. After semantic - analysis pass 2, these references should be replaced with direct - reference to a real AST node. - - Note that this is neither a Statement nor an Expression so this - can't be visited. - """ - - __slots__ = ("target_fullname",) - - def __init__(self, target_fullname: str) -> None: - super().__init__() - self.target_fullname = target_fullname - - @property - def name(self) -> str: - return self.target_fullname.split(".")[-1] - - @property - def fullname(self) -> str: - return self.target_fullname - - def serialize(self) -> JsonDict: - assert False, "ImportedName leaked from semantic analysis" - - @classmethod - def deserialize(cls, data: JsonDict) -> ImportedName: - assert False, "ImportedName should never be serialized" - - def __str__(self) -> str: - return f"ImportedName({self.target_fullname})" - - FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] @@ -563,8 +520,7 @@ def __init__(self) -> None: self.is_static = False self.is_final = False # Name with module prefix - # TODO: Type should be Optional[str] - self._fullname = cast(Bogus[str], None) + self._fullname = "" @property @abstractmethod @@ -572,7 +528,7 @@ def name(self) -> str: pass @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname @@ -653,6 +609,8 @@ class Argument(Node): __slots__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + __match_args__ = ("variable", "type_annotation", "initializer", "kind", "pos_only") + def __init__( self, variable: Var, @@ -743,23 +701,16 @@ def __init__( def max_fixed_argc(self) -> int: return self.max_pos - def set_line( - self, - target: Context | int, - column: int | None = None, - end_line: int | None = None, - end_column: int | None = None, - ) -> None: - super().set_line(target, column, end_line, end_column) - for arg in self.arguments: - # TODO: set arguments line/column to their precise locations. 
- arg.set_line(self.line, self.column, self.end_line, end_column) - def is_dynamic(self) -> bool: return self.type is None -FUNCDEF_FLAGS: Final = FUNCITEM_FLAGS + ["is_decorated", "is_conditional"] +FUNCDEF_FLAGS: Final = FUNCITEM_FLAGS + [ + "is_decorated", + "is_conditional", + "is_trivial_body", + "is_mypy_only", +] # Abstract status of a function NOT_ABSTRACT: Final = 0 @@ -782,8 +733,14 @@ class FuncDef(FuncItem, SymbolNode, Statement): "abstract_status", "original_def", "deco_line", + "is_trivial_body", + "is_mypy_only", + # Present only when a function is decorated with @typing.datasclass_transform or similar + "dataclass_transform_spec", ) + __match_args__ = ("name", "arguments", "type", "body") + # Note that all __init__ args must have default values def __init__( self, @@ -797,11 +754,17 @@ def __init__( self.is_decorated = False self.is_conditional = False # Defined conditionally (within block)? self.abstract_status = NOT_ABSTRACT + # Is this an abstract method with trivial body? + # Such methods can't be called via super(). + self.is_trivial_body = False self.is_final = False # Original conditional definition self.original_def: None | FuncDef | Var | Decorator = None - # Used for error reporting (to keep backwad compatibility with pre-3.8) + # Used for error reporting (to keep backward compatibility with pre-3.8) self.deco_line: int | None = None + # Definitions that appear in if TYPE_CHECKING are marked with this flag. + self.is_mypy_only = False + self.dataclass_transform_spec: DataclassTransformSpec | None = None @property def name(self) -> str: @@ -827,6 +790,11 @@ def serialize(self) -> JsonDict: "flags": get_flags(self, FUNCDEF_FLAGS), "abstract_status": self.abstract_status, # TODO: Do we need expanded, original_def? + "dataclass_transform_spec": ( + None + if self.dataclass_transform_spec is None + else self.dataclass_transform_spec.serialize() + ), } @classmethod @@ -849,6 +817,11 @@ def deserialize(cls, data: JsonDict) -> FuncDef: ret.arg_names = data["arg_names"] ret.arg_kinds = [ArgKind(x) for x in data["arg_kinds"]] ret.abstract_status = data["abstract_status"] + ret.dataclass_transform_spec = ( + DataclassTransformSpec.deserialize(data["dataclass_transform_spec"]) + if data["dataclass_transform_spec"] is not None + else None + ) # Leave these uninitialized so that future uses will trigger an error del ret.arguments del ret.max_pos @@ -869,6 +842,8 @@ class Decorator(SymbolNode, Statement): __slots__ = ("func", "decorators", "original_decorators", "var", "is_overload") + __match_args__ = ("decorators", "var", "func") + func: FuncDef # Decorated function decorators: list[Expression] # Decorators (may be empty) # Some decorators are removed by semanal, keep the original here. 
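The new `is_trivial_body` flag backs the `unsafe_super` message introduced earlier in this diff: calling an abstract method that has only a trivial body via `super()` is reported. A small assumed example:

```python
from abc import ABC, abstractmethod


class Base(ABC):
    @abstractmethod
    def render(self) -> str:
        ...  # trivial body: there is nothing useful to call at runtime


class Child(Base):
    def render(self) -> str:
        # error: Call to abstract method "render" of "Base" with trivial body
        # via super() is unsafe  [safe-super]
        return super().render()
```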
@@ -890,7 +865,7 @@ def name(self) -> str: return self.func.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self.func.fullname @property @@ -940,6 +915,7 @@ def deserialize(cls, data: JsonDict) -> Decorator: "explicit_self_type", "is_ready", "is_inferred", + "invalid_partial_type", "from_module_getattr", "has_explicit_value", "allow_incompatible_override", @@ -976,13 +952,16 @@ class Var(SymbolNode): "from_module_getattr", "has_explicit_value", "allow_incompatible_override", + "invalid_partial_type", ) + __match_args__ = ("name", "type", "final_value") + def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: super().__init__() self._name = name # Name without module prefix # TODO: Should be Optional[str] - self._fullname = cast("Bogus[str]", None) # Name with module prefix + self._fullname = "" # Name with module prefix # TODO: Should be Optional[TypeInfo] self.info = VAR_NO_INFO self.type: mypy.types.Type | None = type # Declared or inferred type, or None @@ -1025,13 +1004,16 @@ def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: self.has_explicit_value = False # If True, subclasses can override this with an incompatible type. self.allow_incompatible_override = False + # If True, this means we didn't manage to infer full type and fall back to + # something like list[Any]. We may decide to not use such types as context. + self.invalid_partial_type = False @property def name(self) -> str: return self._name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def accept(self, visitor: NodeVisitor[T]) -> T: @@ -1069,7 +1051,7 @@ class ClassDef(Statement): __slots__ = ( "name", - "fullname", + "_fullname", "defs", "type_vars", "base_type_exprs", @@ -1081,10 +1063,13 @@ class ClassDef(Statement): "analyzed", "has_incompatible_baseclass", "deco_line", + "removed_statements", ) + __match_args__ = ("name", "defs") + name: str # Name of the class without module prefix - fullname: Bogus[str] # Fully qualified name of the class + _fullname: str # Fully qualified name of the class defs: Block type_vars: list[mypy.types.TypeVarLikeType] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) @@ -1097,6 +1082,8 @@ class ClassDef(Statement): keywords: dict[str, Expression] analyzed: Expression | None has_incompatible_baseclass: bool + # Used by special forms like NamedTuple and TypedDict to store invalid statements + removed_statements: list[Statement] def __init__( self, @@ -1109,7 +1096,7 @@ def __init__( ) -> None: super().__init__() self.name = name - self.fullname = None # type: ignore + self._fullname = "" self.defs = defs self.type_vars = type_vars or [] self.base_type_exprs = base_type_exprs or [] @@ -1122,6 +1109,15 @@ def __init__( self.has_incompatible_baseclass = False # Used for error reporting (to keep backwad compatibility with pre-3.8) self.deco_line: int | None = None + self.removed_statements = [] + + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_class_def(self) @@ -1160,6 +1156,8 @@ class GlobalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def __init__(self, names: list[str]) -> None: @@ -1175,6 +1173,8 @@ class NonlocalDecl(Statement): __slots__ = ("names",) + __match_args__ = ("names",) + names: list[str] def 
__init__(self, names: list[str]) -> None: @@ -1188,6 +1188,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class Block(Statement): __slots__ = ("body", "is_unreachable") + __match_args__ = ("body", "is_unreachable") + def __init__(self, body: list[Statement]) -> None: super().__init__() self.body = body @@ -1210,6 +1212,8 @@ class ExpressionStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1242,6 +1246,8 @@ class AssignmentStmt(Statement): "invalid_recursive_alias", ) + __match_args__ = ("lvalues", "rvalues", "type") + lvalues: list[Lvalue] # This is a TempNode if and only if no rvalue (x: t). rvalue: Expression @@ -1290,6 +1296,8 @@ class OperatorAssignmentStmt(Statement): __slots__ = ("op", "lvalue", "rvalue") + __match_args__ = ("lvalue", "op", "rvalue") + op: str # TODO: Enum? lvalue: Lvalue rvalue: Expression @@ -1307,6 +1315,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WhileStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: Expression body: Block else_body: Block | None @@ -1334,6 +1344,8 @@ class ForStmt(Statement): "is_async", ) + __match_args__ = ("index", "index_type", "expr", "body", "else_body") + # Index variables index: Lvalue # Type given by type comments for index, can be None @@ -1376,6 +1388,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ReturnStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1389,6 +1403,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class AssertStmt(Statement): __slots__ = ("expr", "msg") + __match_args__ = ("expr", "msg") + expr: Expression msg: Expression | None @@ -1404,6 +1420,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class DelStmt(Statement): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Lvalue def __init__(self, expr: Lvalue) -> None: @@ -1438,6 +1456,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class IfStmt(Statement): __slots__ = ("expr", "body", "else_body") + __match_args__ = ("expr", "body", "else_body") + expr: list[Expression] body: list[Block] else_body: Block | None @@ -1455,6 +1475,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class RaiseStmt(Statement): __slots__ = ("expr", "from_expr") + __match_args__ = ("expr", "from_expr") + # Plain 'raise' is a valid statement. expr: Expression | None from_expr: Expression | None @@ -1469,7 +1491,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): - __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body") + __slots__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") + + __match_args__ = ("body", "types", "vars", "handlers", "else_body", "finally_body", "is_star") body: Block # Try body # Plain 'except:' also possible @@ -1478,6 +1502,8 @@ class TryStmt(Statement): handlers: list[Block] # Except bodies else_body: Block | None finally_body: Block | None + # Whether this is try ... 
except* (added in Python 3.11) + is_star: bool def __init__( self, @@ -1495,6 +1521,7 @@ def __init__( self.handlers = handlers self.else_body = else_body self.finally_body = finally_body + self.is_star = False def accept(self, visitor: StatementVisitor[T]) -> T: return visitor.visit_try_stmt(self) @@ -1503,6 +1530,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WithStmt(Statement): __slots__ = ("expr", "target", "unanalyzed_type", "analyzed_types", "body", "is_async") + __match_args__ = ("expr", "target", "body") + expr: list[Expression] target: list[Lvalue | None] # Type given by type comments for target, can be None @@ -1532,6 +1561,10 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class MatchStmt(Statement): + __slots__ = ("subject", "patterns", "guards", "bodies") + + __match_args__ = ("subject", "patterns", "guards", "bodies") + subject: Expression patterns: list[Pattern] guards: list[Expression | None] @@ -1563,6 +1596,8 @@ class IntExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: int # 0 by default def __init__(self, value: int) -> None: @@ -1584,6 +1619,8 @@ class StrExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: str # '' by default def __init__(self, value: str) -> None: @@ -1599,6 +1636,8 @@ class BytesExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + # Note: we deliberately do NOT use bytes here because it ends up # unnecessarily complicating a lot of the result logic. For example, # we'd have to worry about converting the bytes into a format we can @@ -1623,6 +1662,8 @@ class FloatExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: float # 0.0 by default def __init__(self, value: float) -> None: @@ -1638,6 +1679,8 @@ class ComplexExpr(Expression): __slots__ = ("value",) + __match_args__ = ("value",) + value: complex def __init__(self, value: complex) -> None: @@ -1662,6 +1705,8 @@ class StarExpr(Expression): __slots__ = ("expr", "valid") + __match_args__ = ("expr", "valid") + expr: Expression valid: bool @@ -1682,7 +1727,7 @@ class RefExpr(Expression): __slots__ = ( "kind", "node", - "fullname", + "_fullname", "is_new_def", "is_inferred_def", "is_alias_rvalue", @@ -1696,7 +1741,7 @@ def __init__(self) -> None: # Var, FuncDef or TypeInfo that describes this self.node: SymbolNode | None = None # Fully qualified name (or name if not global) - self.fullname: str | None = None + self._fullname = "" # Does this define a new name? self.is_new_def = False # Does this define a new name with inferred type? @@ -1709,6 +1754,14 @@ def __init__(self) -> None: # Cache type guard from callable_type.type_guard self.type_guard: mypy.types.Type | None = None + @property + def fullname(self) -> str: + return self._fullname + + @fullname.setter + def fullname(self, v: str) -> None: + self._fullname = v + class NameExpr(RefExpr): """Name expression @@ -1718,9 +1771,11 @@ class NameExpr(RefExpr): __slots__ = ("name", "is_special_form") + __match_args__ = ("name", "node") + def __init__(self, name: str) -> None: super().__init__() - self.name = name # Name referred to (may be qualified) + self.name = name # Name referred to # Is this a l.h.s. of a special form assignment like typed dict or type variable? 
self.is_special_form = False @@ -1736,6 +1791,8 @@ class MemberExpr(RefExpr): __slots__ = ("expr", "name", "def_var") + __match_args__ = ("expr", "name", "node") + def __init__(self, expr: Expression, name: str) -> None: super().__init__() self.expr = expr @@ -1797,6 +1854,8 @@ class CallExpr(Expression): __slots__ = ("callee", "args", "arg_kinds", "arg_names", "analyzed") + __match_args__ = ("callee", "args", "arg_kinds", "arg_names") + def __init__( self, callee: Expression, @@ -1825,6 +1884,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldFromExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -1838,6 +1899,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression | None def __init__(self, expr: Expression | None) -> None: @@ -1856,6 +1919,8 @@ class IndexExpr(Expression): __slots__ = ("base", "index", "method_type", "analyzed") + __match_args__ = ("base", "index") + base: Expression index: Expression # Inferred __getitem__ method type @@ -1880,6 +1945,8 @@ class UnaryExpr(Expression): __slots__ = ("op", "expr", "method_type") + __match_args__ = ("op", "expr") + op: str # TODO: Enum? expr: Expression # Inferred operator method type @@ -1900,6 +1967,8 @@ class AssignmentExpr(Expression): __slots__ = ("target", "value") + __match_args__ = ("target", "value") + def __init__(self, target: Expression, value: Expression) -> None: super().__init__() self.target = target @@ -1910,10 +1979,22 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class OpExpr(Expression): - """Binary operation (other than . or [] or comparison operators, - which have specific nodes).""" + """Binary operation. + + The dot (.), [] and comparison operators have more specific nodes. + """ + + __slots__ = ( + "op", + "left", + "right", + "method_type", + "right_always", + "right_unreachable", + "analyzed", + ) - __slots__ = ("op", "left", "right", "method_type", "right_always", "right_unreachable") + __match_args__ = ("left", "op", "right") op: str # TODO: Enum? left: Expression @@ -1924,8 +2005,12 @@ class OpExpr(Expression): right_always: bool # Per static analysis only: Is the right side unreachable? right_unreachable: bool + # Used for expressions that represent a type "X | Y" in some contexts + analyzed: TypeAliasExpr | None - def __init__(self, op: str, left: Expression, right: Expression) -> None: + def __init__( + self, op: str, left: Expression, right: Expression, analyzed: TypeAliasExpr | None = None + ) -> None: super().__init__() self.op = op self.left = left @@ -1933,6 +2018,7 @@ def __init__(self, op: str, left: Expression, right: Expression) -> None: self.method_type = None self.right_always = False self.right_unreachable = False + self.analyzed = analyzed def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_op_expr(self) @@ -1943,6 +2029,8 @@ class ComparisonExpr(Expression): __slots__ = ("operators", "operands", "method_types") + __match_args__ = ("operands", "operators") + operators: list[str] operands: list[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). 
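The `__match_args__` tuples being added to the node classes throughout this file make the AST usable with structural pattern matching. A hypothetical sketch (Python 3.10+, with a mypy version containing this change installed):

```python
from mypy.nodes import Expression, IntExpr, OpExpr


def describe(expr: Expression) -> str:
    match expr:
        # Positional sub-patterns follow __match_args__ = ("left", "op", "right")
        case OpExpr(IntExpr(a), "+", IntExpr(b)):
            return f"constant sum: {a + b}"
        case _:
            return "something else"


print(describe(OpExpr("+", IntExpr(2), IntExpr(3))))  # constant sum: 5
```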
@@ -1973,6 +2061,8 @@ class SliceExpr(Expression): __slots__ = ("begin_index", "end_index", "stride") + __match_args__ = ("begin_index", "end_index", "stride") + begin_index: Expression | None end_index: Expression | None stride: Expression | None @@ -1997,6 +2087,8 @@ class CastExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2014,6 +2106,8 @@ class AssertTypeExpr(Expression): __slots__ = ("expr", "type") + __match_args__ = ("expr", "type") + expr: Expression type: mypy.types.Type @@ -2031,6 +2125,8 @@ class RevealExpr(Expression): __slots__ = ("expr", "kind", "local_nodes") + __match_args__ = ("expr", "kind", "local_nodes") + expr: Expression | None kind: int local_nodes: list[Var] | None @@ -2052,6 +2148,8 @@ class SuperExpr(Expression): __slots__ = ("name", "info", "call") + __match_args__ = ("name", "call", "info") + name: str info: TypeInfo | None # Type that contains this super expression call: CallExpr # The expression super(...) @@ -2069,6 +2167,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class LambdaExpr(FuncItem, Expression): """Lambda expression""" + __match_args__ = ("arguments", "arg_names", "arg_kinds", "body") + @property def name(self) -> str: return "" @@ -2092,6 +2192,8 @@ class ListExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2107,6 +2209,8 @@ class DictExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[tuple[Expression | None, Expression]] def __init__(self, items: list[tuple[Expression | None, Expression]]) -> None: @@ -2124,6 +2228,8 @@ class TupleExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2139,6 +2245,8 @@ class SetExpr(Expression): __slots__ = ("items",) + __match_args__ = ("items",) + items: list[Expression] def __init__(self, items: list[Expression]) -> None: @@ -2154,6 +2262,8 @@ class GeneratorExpr(Expression): __slots__ = ("left_expr", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("left_expr", "indices", "sequences", "condlists") + left_expr: Expression sequences: list[Expression] condlists: list[list[Expression]] @@ -2184,6 +2294,8 @@ class ListComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2199,6 +2311,8 @@ class SetComprehension(Expression): __slots__ = ("generator",) + __match_args__ = ("generator",) + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: @@ -2214,6 +2328,8 @@ class DictionaryComprehension(Expression): __slots__ = ("key", "value", "sequences", "condlists", "is_async", "indices") + __match_args__ = ("key", "value", "indices", "sequences", "condlists") + key: Expression value: Expression sequences: list[Expression] @@ -2247,6 +2363,8 @@ class ConditionalExpr(Expression): __slots__ = ("cond", "if_expr", "else_expr") + __match_args__ = ("if_expr", "cond", "else_expr") + cond: Expression if_expr: Expression else_expr: Expression @@ -2266,6 +2384,8 @@ class TypeApplication(Expression): __slots__ = ("expr", "types") + __match_args__ = ("expr", "types") + expr: Expression types: list[mypy.types.Type] @@ -2343,6 +2463,8 @@ class TypeVarExpr(TypeVarLikeExpr): __slots__ = ("values",) + __match_args__ = ("name", "values", "upper_bound") + 
# Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. values: list[mypy.types.Type] @@ -2386,6 +2508,8 @@ def deserialize(cls, data: JsonDict) -> TypeVarExpr: class ParamSpecExpr(TypeVarLikeExpr): __slots__ = () + __match_args__ = ("name", "upper_bound") + def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_paramspec_expr(self) @@ -2412,7 +2536,22 @@ def deserialize(cls, data: JsonDict) -> ParamSpecExpr: class TypeVarTupleExpr(TypeVarLikeExpr): """Type variable tuple expression TypeVarTuple(...).""" - __slots__ = () + __slots__ = "tuple_fallback" + + tuple_fallback: mypy.types.Instance + + __match_args__ = ("name", "upper_bound") + + def __init__( + self, + name: str, + fullname: str, + upper_bound: mypy.types.Type, + tuple_fallback: mypy.types.Instance, + variance: int = INVARIANT, + ) -> None: + super().__init__(name, fullname, upper_bound, variance) + self.tuple_fallback = tuple_fallback def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_type_var_tuple_expr(self) @@ -2423,6 +2562,7 @@ def serialize(self) -> JsonDict: "name": self._name, "fullname": self._fullname, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), "variance": self.variance, } @@ -2433,6 +2573,7 @@ def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: data["name"], data["fullname"], mypy.types.deserialize_type(data["upper_bound"]), + mypy.types.Instance.deserialize(data["tuple_fallback"]), data["variance"], ) @@ -2442,9 +2583,11 @@ class TypeAliasExpr(Expression): __slots__ = ("type", "tvars", "no_args", "node") + __match_args__ = ("type", "tvars", "no_args", "node") + # The target type. type: mypy.types.Type - # Names of unbound type variables used to define the alias + # Names of type variables used to define the alias tvars: list[str] # Whether this alias was defined in bare form. 
Used to distinguish # between @@ -2457,7 +2600,7 @@ class TypeAliasExpr(Expression): def __init__(self, node: TypeAlias) -> None: super().__init__() self.type = node.target - self.tvars = node.alias_tvars + self.tvars = [v.name for v in node.alias_tvars] self.no_args = node.no_args self.node = node @@ -2470,6 +2613,8 @@ class NamedTupleExpr(Expression): __slots__ = ("info", "is_typed") + __match_args__ = ("info",) + # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) info: TypeInfo @@ -2489,6 +2634,8 @@ class TypedDictExpr(Expression): __slots__ = ("info",) + __match_args__ = ("info",) + # The class representation of this typed dict info: TypeInfo @@ -2505,6 +2652,8 @@ class EnumCallExpr(Expression): __slots__ = ("info", "items", "values") + __match_args__ = ("info", "items", "values") + # The class representation of this enumerated type info: TypeInfo # The item names (for debugging) @@ -2526,9 +2675,9 @@ class PromoteExpr(Expression): __slots__ = ("type",) - type: mypy.types.Type + type: mypy.types.ProperType - def __init__(self, type: mypy.types.Type) -> None: + def __init__(self, type: mypy.types.ProperType) -> None: super().__init__() self.type = type @@ -2541,6 +2690,8 @@ class NewTypeExpr(Expression): __slots__ = ("name", "old_type", "info") + __match_args__ = ("name", "old_type", "info") + name: str # The base type (the second argument to NewType) old_type: mypy.types.Type | None @@ -2564,6 +2715,8 @@ class AwaitExpr(Expression): __slots__ = ("expr",) + __match_args__ = ("expr",) + expr: Expression def __init__(self, expr: Expression) -> None: @@ -2645,6 +2798,7 @@ class is generic then it will be a type constructor of higher kind. "inferring", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "type_vars", "has_param_spec_type", "bases", @@ -2660,9 +2814,11 @@ class is generic then it will be a type constructor of higher kind. "has_type_var_tuple_type", "type_var_tuple_prefix", "type_var_tuple_suffix", + "self_type", + "dataclass_transform_spec", ) - _fullname: Bogus[str] # Fully qualified name + _fullname: str # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. @@ -2738,6 +2894,10 @@ class is generic then it will be a type constructor of higher kind. # (and __setattr__), but without the __getattr__ method. fallback_to_any: bool + # Same as above but for cases where metaclass has type Any. This will suppress + # all attribute errors only for *class object* access. + meta_fallback_to_any: bool + # Information related to type annotations. # Generic type variable names (full names) @@ -2753,7 +2913,7 @@ class is generic then it will be a type constructor of higher kind. # even though it's not a subclass in Python. The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. - _promote: list[mypy.types.Type] + _promote: list[mypy.types.ProperType] # This is used for promoting native integer types such as 'i64' to # 'int'. (_promote is used for the other direction.) This only @@ -2763,7 +2923,7 @@ class is generic then it will be a type constructor of higher kind. # This results in some unintuitive results, such as that even # though i64 is compatible with int and int is compatible with # float, i64 is *not* compatible with float. 
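Among the new `TypeInfo` fields above, `self_type` stores the shared type variable behind `typing.Self`. An assumed user-level example of what it models:

```python
from typing_extensions import Self  # typing.Self on Python 3.11+


class Builder:
    def __init__(self) -> None:
        self.items: list[int] = []

    def add(self, item: int) -> Self:
        self.items.append(item)
        return self


class FancyBuilder(Builder):
    pass


fancy: FancyBuilder = FancyBuilder().add(1)  # Self binds to FancyBuilder
```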
- alt_promote: TypeInfo | None + alt_promote: mypy.types.Instance | None # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type @@ -2800,10 +2960,17 @@ class is generic then it will be a type constructor of higher kind. # in case we are doing multiple semantic analysis passes. special_alias: TypeAlias | None + # Shared type variable for typing.Self in this class (if used, otherwise None). + self_type: mypy.types.TypeVarType | None + + # Added if the corresponding class is directly decorated with `typing.dataclass_transform` + dataclass_transform_spec: DataclassTransformSpec | None + FLAGS: Final = [ "is_abstract", "is_enum", "fallback_to_any", + "meta_fallback_to_any", "is_named_tuple", "is_newtype", "is_protocol", @@ -2843,6 +3010,7 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_final = False self.is_enum = False self.fallback_to_any = False + self.meta_fallback_to_any = False self._promote = [] self.alt_promote = None self.tuple_type = None @@ -2852,8 +3020,11 @@ def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None self.is_newtype = False self.is_intersection = False self.metadata = {} + self.self_type = None + self.dataclass_transform_spec = None def add_type_vars(self) -> None: + self.has_type_var_tuple_type = False if self.defn.type_vars: for i, vd in enumerate(self.defn.type_vars): if isinstance(vd, mypy.types.ParamSpecType): @@ -2874,7 +3045,7 @@ def name(self) -> str: return self.defn.name @property - def fullname(self) -> Bogus[str]: + def fullname(self) -> str: return self._fullname def is_generic(self) -> bool: @@ -2902,7 +3073,10 @@ def protocol_members(self) -> list[str]: assert self.mro, "This property can be only accessed after MRO is (re-)calculated" for base in self.mro[:-1]: # we skip "object" since everyone implements it if base.is_protocol: - for name in base.names: + for name, node in base.names.items(): + if isinstance(node.node, (TypeAlias, TypeVarExpr)): + # These are auxiliary definitions (and type aliases are prohibited). 
+ continue members.add(name) return sorted(list(members)) @@ -3051,6 +3225,7 @@ def serialize(self) -> JsonDict: "bases": [b.serialize() for b in self.bases], "mro": [c.fullname for c in self.mro], "_promote": [p.serialize() for p in self._promote], + "alt_promote": None if self.alt_promote is None else self.alt_promote.serialize(), "declared_metaclass": ( None if self.declared_metaclass is None else self.declared_metaclass.serialize() ), @@ -3065,6 +3240,12 @@ def serialize(self) -> JsonDict: "metadata": self.metadata, "slots": list(sorted(self.slots)) if self.slots is not None else None, "deletable_attributes": self.deletable_attributes, + "self_type": self.self_type.serialize() if self.self_type is not None else None, + "dataclass_transform_spec": ( + self.dataclass_transform_spec.serialize() + if self.dataclass_transform_spec is not None + else None + ), } return data @@ -3080,7 +3261,17 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: ti.type_vars = data["type_vars"] ti.has_param_spec_type = data["has_param_spec_type"] ti.bases = [mypy.types.Instance.deserialize(b) for b in data["bases"]] - ti._promote = [mypy.types.deserialize_type(p) for p in data["_promote"]] + _promote = [] + for p in data["_promote"]: + t = mypy.types.deserialize_type(p) + assert isinstance(t, mypy.types.ProperType) + _promote.append(t) + ti._promote = _promote + ti.alt_promote = ( + None + if data["alt_promote"] is None + else mypy.types.Instance.deserialize(data["alt_promote"]) + ) ti.declared_metaclass = ( None if data["declared_metaclass"] is None @@ -3116,6 +3307,12 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: ti.slots = set(data["slots"]) if data["slots"] is not None else None ti.deletable_attributes = data["deletable_attributes"] set_flags(ti, data["flags"]) + st = data["self_type"] + ti.self_type = mypy.types.TypeVarType.deserialize(st) if st is not None else None + if data.get("dataclass_transform_spec") is not None: + ti.dataclass_transform_spec = DataclassTransformSpec.deserialize( + data["dataclass_transform_spec"] + ) return ti @@ -3146,10 +3343,10 @@ class FakeInfo(TypeInfo): def __init__(self, msg: str) -> None: self.msg = msg - def __getattribute__(self, attr: str) -> None: + def __getattribute__(self, attr: str) -> type: # Handle __class__ so that isinstance still works... if attr == "__class__": - return object.__getattribute__(self, attr) + return object.__getattribute__(self, attr) # type: ignore[no-any-return] raise AssertionError(object.__getattribute__(self, "msg")) @@ -3180,10 +3377,9 @@ class TypeAlias(SymbolNode): class-valued attributes. See SemanticAnalyzerPass2.check_and_set_up_type_alias for details. - Aliases can be generic. Currently, mypy uses unbound type variables for - generic aliases and identifies them by name. Essentially, type aliases - work as macros that expand textually. The definition and expansion rules are - following: + Aliases can be generic. We use bound type variables for generic aliases, similar + to classes. Essentially, type aliases work as macros that expand textually. + The definition and expansion rules are following: 1. An alias targeting a generic class without explicit variables act as the given class (this doesn't apply to TypedDict, Tuple and Callable, which @@ -3234,11 +3430,11 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here Meaning of other fields: - target: The target type. For generic aliases contains unbound type variables - as nested types. + target: The target type. 
For generic aliases contains bound type variables + as nested types (currently TypeVar and ParamSpec are supported). _fullname: Qualified name of this type alias. This is used in particular to track fine grained dependencies from aliases. - alias_tvars: Names of unbound type variables used to define this alias. + alias_tvars: Type variables used to define this alias. normalized: Used to distinguish between `A = List`, and `A = list`. Both are internally stored using `builtins.list` (because `typing.List` is itself an alias), while the second cannot be subscripted because of @@ -3258,6 +3454,8 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here "eager", ) + __match_args__ = ("name", "target", "alias_tvars", "no_args") + def __init__( self, target: mypy.types.Type, @@ -3265,7 +3463,7 @@ def __init__( line: int, column: int, *, - alias_tvars: list[str] | None = None, + alias_tvars: list[mypy.types.TypeVarLikeType] | None = None, no_args: bool = False, normalized: bool = False, eager: bool = False, @@ -3285,8 +3483,13 @@ def __init__( @classmethod def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: - """Generate an alias to the tuple type described by a given TypeInfo.""" + """Generate an alias to the tuple type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic tuple types), + they must be set by the caller (when fully analyzed). + """ assert info.tuple_type + # TODO: is it possible to refactor this to set the correct type vars here? return TypeAlias( info.tuple_type.copy_modified(fallback=mypy.types.Instance(info, info.defn.type_vars)), info.fullname, @@ -3296,8 +3499,13 @@ def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: @classmethod def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias: - """Generate an alias to the TypedDict type described by a given TypeInfo.""" + """Generate an alias to the TypedDict type described by a given TypeInfo. + + NOTE: this doesn't set type alias type variables (for generic TypedDicts), + they must be set by the caller (when fully analyzed). + """ assert info.typeddict_type + # TODO: is it possible to refactor this to set the correct type vars here? 
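# Illustrative sketch of the alias behaviour documented above, using only the
# standard typing module: a generic alias keeps its type variables in alias_tvars
# and expands like a macro when subscripted.
from typing import Dict, List, TypeVar

T = TypeVar("T")
A = Dict[str, List[T]]  # the TypeAlias node stores the (now bound) T in alias_tvars

def f(x: A[int]) -> None:  # expands to Dict[str, List[int]]
    ...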
return TypeAlias( info.typeddict_type.copy_modified( fallback=mypy.types.Instance(info, info.defn.type_vars) @@ -3315,12 +3523,16 @@ def name(self) -> str: def fullname(self) -> str: return self._fullname + @property + def has_param_spec_type(self) -> bool: + return any(isinstance(v, mypy.types.ParamSpecType) for v in self.alias_tvars) + def serialize(self) -> JsonDict: data: JsonDict = { ".class": "TypeAlias", "fullname": self._fullname, "target": self.target.serialize(), - "alias_tvars": self.alias_tvars, + "alias_tvars": [v.serialize() for v in self.alias_tvars], "no_args": self.no_args, "normalized": self.normalized, "line": self.line, @@ -3335,7 +3547,8 @@ def accept(self, visitor: NodeVisitor[T]) -> T: def deserialize(cls, data: JsonDict) -> TypeAlias: assert data[".class"] == "TypeAlias" fullname = data["fullname"] - alias_tvars = data["alias_tvars"] + alias_tvars = [mypy.types.deserialize_type(v) for v in data["alias_tvars"]] + assert all(isinstance(t, mypy.types.TypeVarLikeType) for t in alias_tvars) target = mypy.types.deserialize_type(data["target"]) no_args = data["no_args"] normalized = data["normalized"] @@ -3346,7 +3559,7 @@ def deserialize(cls, data: JsonDict) -> TypeAlias: fullname, line, column, - alias_tvars=alias_tvars, + alias_tvars=cast(List[mypy.types.TypeVarLikeType], alias_tvars), no_args=no_args, normalized=normalized, ) @@ -3573,8 +3786,7 @@ def serialize(self, prefix: str, name: str) -> JsonDict: if prefix is not None: fullname = self.node.fullname if ( - fullname is not None - and "." in fullname + "." in fullname and fullname != prefix + "." + name and not (isinstance(self.node, Var) and self.node.from_module_getattr) ): @@ -3659,6 +3871,56 @@ def deserialize(cls, data: JsonDict) -> SymbolTable: return st +class DataclassTransformSpec: + """Specifies how a dataclass-like transform should be applied. The fields here are based on the + parameters accepted by `typing.dataclass_transform`.""" + + __slots__ = ( + "eq_default", + "order_default", + "kw_only_default", + "frozen_default", + "field_specifiers", + ) + + def __init__( + self, + *, + eq_default: bool | None = None, + order_default: bool | None = None, + kw_only_default: bool | None = None, + field_specifiers: tuple[str, ...] 
| None = None,
+        # Specified outside of PEP 681:
+        # frozen_default was added to CPython in https://github.com/python/cpython/pull/99958 citing
+        # positive discussion in typing-sig
+        frozen_default: bool | None = None,
+    ):
+        self.eq_default = eq_default if eq_default is not None else True
+        self.order_default = order_default if order_default is not None else False
+        self.kw_only_default = kw_only_default if kw_only_default is not None else False
+        self.frozen_default = frozen_default if frozen_default is not None else False
+        self.field_specifiers = field_specifiers if field_specifiers is not None else ()
+
+    def serialize(self) -> JsonDict:
+        return {
+            "eq_default": self.eq_default,
+            "order_default": self.order_default,
+            "kw_only_default": self.kw_only_default,
+            "frozen_default": self.frozen_default,
+            "field_specifiers": list(self.field_specifiers),
+        }
+
+    @classmethod
+    def deserialize(cls, data: JsonDict) -> DataclassTransformSpec:
+        return DataclassTransformSpec(
+            eq_default=data.get("eq_default"),
+            order_default=data.get("order_default"),
+            kw_only_default=data.get("kw_only_default"),
+            frozen_default=data.get("frozen_default"),
+            field_specifiers=tuple(data.get("field_specifiers", [])),
+        )
+
+
 def get_flags(node: Node, names: list[str]) -> list[str]:
     return [name for name in names if getattr(node, name)]
@@ -3705,7 +3967,7 @@ def check_arg_kinds(
         if kind == ARG_POS:
             if is_var_arg or is_kw_arg or seen_named or seen_opt:
                 fail(
-                    "Required positional args may not appear " "after default, named or var args",
+                    "Required positional args may not appear after default, named or var args",
                     node,
                 )
                 break
diff --git a/mypy/options.py b/mypy/options.py
index ac46b70f8ebe..92c96a92c531 100644
--- a/mypy/options.py
+++ b/mypy/options.py
@@ -3,15 +3,13 @@
 import pprint
 import re
 import sys
-from typing import TYPE_CHECKING, Any, Callable, Mapping, Pattern
+from typing import Any, Callable, Dict, Mapping, Pattern
 from typing_extensions import Final
 from mypy import defaults
+from mypy.errorcodes import ErrorCode, error_codes
 from mypy.util import get_class_descriptors, replace_object_state
-if TYPE_CHECKING:
-    from mypy.errorcodes import ErrorCode
-
 class BuildType:
     STANDARD: Final = 0
@@ -27,6 +25,8 @@ class BuildType:
     "always_true",
     "check_untyped_defs",
     "debug_cache",
+    "disable_error_code",
+    "disabled_error_codes",
     "disallow_any_decorated",
     "disallow_any_explicit",
     "disallow_any_expr",
@@ -37,28 +37,40 @@ class BuildType:
     "disallow_untyped_calls",
     "disallow_untyped_decorators",
     "disallow_untyped_defs",
-    "follow_imports",
+    "enable_error_code",
+    "enabled_error_codes",
     "follow_imports_for_stubs",
+    "follow_imports",
     "ignore_errors",
     "ignore_missing_imports",
+    "implicit_optional",
     "implicit_reexport",
     "local_partial_types",
     "mypyc",
-    "no_implicit_optional",
-    "show_none_errors",
     "strict_concatenate",
     "strict_equality",
     "strict_optional",
-    "strict_optional_whitelist",
    "warn_no_return",
     "warn_return_any",
     "warn_unreachable",
     "warn_unused_ignores",
 }
-OPTIONS_AFFECTING_CACHE: Final = (PER_MODULE_OPTIONS | {"platform", "bazel", "plugins"}) - {
-    "debug_cache"
-}
+OPTIONS_AFFECTING_CACHE: Final = (
+    PER_MODULE_OPTIONS
+    | {
+        "platform",
+        "bazel",
+        "plugins",
+        "disable_bytearray_promotion",
+        "disable_memoryview_promotion",
+    }
+) - {"debug_cache"}
+
+# Features that are currently incomplete/experimental
+TYPE_VAR_TUPLE: Final = "TypeVarTuple"
+UNPACK: Final = "Unpack"
+INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE, UNPACK))
 class Options:
@@ -77,6 +89,8 @@ def
__init__(self) -> None: self.platform = sys.platform self.custom_typing_module: str | None = None self.custom_typeshed_dir: str | None = None + # The abspath() version of the above, we compute it once as an optimization. + self.abs_custom_typeshed_dir: str | None = None self.mypy_path: list[str] = [] self.report_dirs: dict[str, str] = {} # Show errors in PEP 561 packages/site-packages modules @@ -93,7 +107,7 @@ def __init__(self) -> None: # This allows definitions of packages without __init__.py and allows packages to span # multiple directories. This flag affects both import discovery and the association of # input files/modules/packages to the relevant file and fully qualified module name. - self.namespace_packages = False + self.namespace_packages = True # Use current directory and MYPYPATH to determine fully qualified module names of files # passed by automatically considering their subdirectories as packages. This is only # relevant if namespace packages are enabled, since otherwise examining __init__.py's is @@ -160,15 +174,8 @@ def __init__(self) -> None: self.color_output = True self.error_summary = True - # Files in which to allow strict-Optional related errors - # TODO: Kill this in favor of show_none_errors - self.strict_optional_whitelist: list[str] | None = None - - # Alternate way to show/hide strict-None-checking related errors - self.show_none_errors = True - - # Don't assume arguments with default values of None are Optional - self.no_implicit_optional = False + # Assume arguments with default values of None are Optional + self.implicit_optional = False # Don't re-export names unless they are imported with `from ... as ...` self.implicit_reexport = True @@ -220,6 +227,12 @@ def __init__(self) -> None: # supports globbing self.files: list[str] | None = None + # A list of packages for mypy to type check + self.packages: list[str] | None = None + + # A list of modules for mypy to type check + self.modules: list[str] | None = None + # Write junit.xml to given file self.junit_xml: str | None = None @@ -236,6 +249,9 @@ def __init__(self) -> None: # Read cache files in fine-grained incremental mode (cache must include dependencies) self.use_fine_grained_cache = False + # Run tree.serialize() even if cache generation is disabled + self.debug_serialize = False + # Tune certain behaviors when being used as a front-end to mypyc. Set per-module # in modules being compiled. Not in the config file or command line. self.mypyc = False @@ -267,8 +283,10 @@ def __init__(self) -> None: self.dump_type_stats = False self.dump_inference_stats = False self.dump_build_stats = False - self.enable_incomplete_features = False + self.enable_incomplete_features = False # deprecated + self.enable_incomplete_feature: list[str] = [] self.timing_stats: str | None = None + self.line_checking_stats: str | None = None # -- test options -- # Stop after the semantic analysis phase @@ -281,7 +299,7 @@ def __init__(self) -> None: self.shadow_file: list[list[str]] | None = None self.show_column_numbers: bool = False self.show_error_end: bool = False - self.show_error_codes = False + self.hide_error_codes = False # Use soft word wrap and show trimmed source snippets with error location markers. self.pretty = False self.dump_graph = False @@ -301,6 +319,8 @@ def __init__(self) -> None: self.fast_exit = True # fast path for finding modules from source set self.fast_module_lookup = False + # Allow empty function bodies even if it is not safe, used for testing only. 
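# Illustrative sketch of what the implicit_optional option above controls:
# with it enabled, a None default makes the parameter Optional.
def greet(name: str = None) -> str:  # treated as "name: str | None" only if implicit_optional is set
    return "hello" if name is None else f"hello {name}"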
+ self.allow_empty_bodies = False # Used to transform source code before parsing if not None # TODO: Make the type precise (AnyStr -> AnyStr) self.transform_source: Callable[[Any], Any] | None = None @@ -315,9 +335,14 @@ def __init__(self) -> None: # skip most errors after this many messages have been reported. # -1 means unlimited. self.many_errors_threshold = defaults.MANY_ERRORS_THRESHOLD - # Enable recursive type aliases (currently experimental) + # Disable recursive type aliases (currently experimental) + self.disable_recursive_aliases = False + # Deprecated reverse version of the above, do not use. self.enable_recursive_aliases = False + self.disable_bytearray_promotion = False + self.disable_memoryview_promotion = False + # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer @property def new_semantic_analyzer(self) -> bool: @@ -347,6 +372,20 @@ def apply_changes(self, changes: dict[str, object]) -> Options: # This is the only option for which a per-module and a global # option sometimes beheave differently. new_options.ignore_missing_imports_per_module = True + + # These two act as overrides, so apply them when cloning. + # Similar to global codes enabling overrides disabling, so we start from latter. + new_options.disabled_error_codes = self.disabled_error_codes.copy() + new_options.enabled_error_codes = self.enabled_error_codes.copy() + for code_str in new_options.disable_error_code: + code = error_codes[code_str] + new_options.disabled_error_codes.add(code) + new_options.enabled_error_codes.discard(code) + for code_str in new_options.enable_error_code: + code = error_codes[code_str] + new_options.enabled_error_codes.add(code) + new_options.disabled_error_codes.discard(code) + return new_options def build_per_module_cache(self) -> None: @@ -446,4 +485,10 @@ def compile_glob(self, s: str) -> Pattern[str]: return re.compile(expr + "\\Z") def select_options_affecting_cache(self) -> Mapping[str, object]: - return {opt: getattr(self, opt) for opt in OPTIONS_AFFECTING_CACHE} + result: Dict[str, object] = {} + for opt in OPTIONS_AFFECTING_CACHE: + val = getattr(self, opt) + if opt in ("disabled_error_codes", "enabled_error_codes"): + val = sorted([code.code for code in val]) + result[opt] = val + return result diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py new file mode 100644 index 000000000000..9b8238eff83f --- /dev/null +++ b/mypy/partially_defined.py @@ -0,0 +1,662 @@ +from __future__ import annotations + +from enum import Enum + +from mypy import checker, errorcodes +from mypy.messages import MessageBuilder +from mypy.nodes import ( + AssertStmt, + AssignmentExpr, + AssignmentStmt, + BreakStmt, + ClassDef, + Context, + ContinueStmt, + DictionaryComprehension, + Expression, + ExpressionStmt, + ForStmt, + FuncDef, + FuncItem, + GeneratorExpr, + GlobalDecl, + IfStmt, + Import, + ImportFrom, + LambdaExpr, + ListExpr, + Lvalue, + MatchStmt, + MypyFile, + NameExpr, + NonlocalDecl, + RaiseStmt, + ReturnStmt, + StarExpr, + SymbolTable, + TryStmt, + TupleExpr, + WhileStmt, + WithStmt, + implicit_module_attrs, +) +from mypy.options import Options +from mypy.patterns import AsPattern, StarredPattern +from mypy.reachability import ALWAYS_TRUE, infer_pattern_value +from mypy.traverser import ExtendedTraverserVisitor +from mypy.types import Type, UninhabitedType + + +class BranchState: + """BranchState contains information about variable definition at the end of a branching statement. + `if` and `match` are examples of branching statements. 
+ + `may_be_defined` contains variables that were defined in only some branches. + `must_be_defined` contains variables that were defined in all branches. + """ + + def __init__( + self, + must_be_defined: set[str] | None = None, + may_be_defined: set[str] | None = None, + skipped: bool = False, + ) -> None: + if may_be_defined is None: + may_be_defined = set() + if must_be_defined is None: + must_be_defined = set() + + self.may_be_defined = set(may_be_defined) + self.must_be_defined = set(must_be_defined) + self.skipped = skipped + + def copy(self) -> BranchState: + return BranchState( + must_be_defined=set(self.must_be_defined), + may_be_defined=set(self.may_be_defined), + skipped=self.skipped, + ) + + +class BranchStatement: + def __init__(self, initial_state: BranchState) -> None: + self.initial_state = initial_state + self.branches: list[BranchState] = [ + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) + ] + + def copy(self) -> BranchStatement: + result = BranchStatement(self.initial_state) + result.branches = [b.copy() for b in self.branches] + return result + + def next_branch(self) -> None: + self.branches.append( + BranchState( + must_be_defined=self.initial_state.must_be_defined, + may_be_defined=self.initial_state.may_be_defined, + ) + ) + + def record_definition(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.add(name) + self.branches[-1].may_be_defined.discard(name) + + def delete_var(self, name: str) -> None: + assert len(self.branches) > 0 + self.branches[-1].must_be_defined.discard(name) + self.branches[-1].may_be_defined.discard(name) + + def record_nested_branch(self, state: BranchState) -> None: + assert len(self.branches) > 0 + current_branch = self.branches[-1] + if state.skipped: + current_branch.skipped = True + return + current_branch.must_be_defined.update(state.must_be_defined) + current_branch.may_be_defined.update(state.may_be_defined) + current_branch.may_be_defined.difference_update(current_branch.must_be_defined) + + def skip_branch(self) -> None: + assert len(self.branches) > 0 + self.branches[-1].skipped = True + + def is_possibly_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + return name in self.branches[-1].may_be_defined + + def is_undefined(self, name: str) -> bool: + assert len(self.branches) > 0 + branch = self.branches[-1] + return name not in branch.may_be_defined and name not in branch.must_be_defined + + def is_defined_in_a_branch(self, name: str) -> bool: + assert len(self.branches) > 0 + for b in self.branches: + if name in b.must_be_defined or name in b.may_be_defined: + return True + return False + + def done(self) -> BranchState: + # First, compute all vars, including skipped branches. We include skipped branches + # because our goal is to capture all variables that semantic analyzer would + # consider defined. + all_vars = set() + for b in self.branches: + all_vars.update(b.may_be_defined) + all_vars.update(b.must_be_defined) + # For the rest of the things, we only care about branches that weren't skipped. 
+ non_skipped_branches = [b for b in self.branches if not b.skipped] + if len(non_skipped_branches) > 0: + must_be_defined = non_skipped_branches[0].must_be_defined + for b in non_skipped_branches[1:]: + must_be_defined.intersection_update(b.must_be_defined) + else: + must_be_defined = set() + # Everything that wasn't defined in all branches but was defined + # in at least one branch should be in `may_be_defined`! + may_be_defined = all_vars.difference(must_be_defined) + return BranchState( + must_be_defined=must_be_defined, + may_be_defined=may_be_defined, + skipped=len(non_skipped_branches) == 0, + ) + + +class ScopeType(Enum): + Global = 1 + Class = 2 + Func = 3 + Generator = 3 + + +class Scope: + def __init__(self, stmts: list[BranchStatement], scope_type: ScopeType) -> None: + self.branch_stmts: list[BranchStatement] = stmts + self.scope_type = scope_type + self.undefined_refs: dict[str, set[NameExpr]] = {} + + def copy(self) -> Scope: + result = Scope([s.copy() for s in self.branch_stmts], self.scope_type) + result.undefined_refs = self.undefined_refs.copy() + return result + + def record_undefined_ref(self, o: NameExpr) -> None: + if o.name not in self.undefined_refs: + self.undefined_refs[o.name] = set() + self.undefined_refs[o.name].add(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + return self.undefined_refs.pop(name, set()) + + +class DefinedVariableTracker: + """DefinedVariableTracker manages the state and scope for the UndefinedVariablesVisitor.""" + + def __init__(self) -> None: + # There's always at least one scope. Within each scope, there's at least one "global" BranchingStatement. + self.scopes: list[Scope] = [Scope([BranchStatement(BranchState())], ScopeType.Global)] + # disable_branch_skip is used to disable skipping a branch due to a return/raise/etc. This is useful + # in things like try/except/finally statements. + self.disable_branch_skip = False + + def copy(self) -> DefinedVariableTracker: + result = DefinedVariableTracker() + result.scopes = [s.copy() for s in self.scopes] + result.disable_branch_skip = self.disable_branch_skip + return result + + def _scope(self) -> Scope: + assert len(self.scopes) > 0 + return self.scopes[-1] + + def enter_scope(self, scope_type: ScopeType) -> None: + assert len(self._scope().branch_stmts) > 0 + self.scopes.append( + Scope([BranchStatement(self._scope().branch_stmts[-1].branches[-1])], scope_type) + ) + + def exit_scope(self) -> None: + self.scopes.pop() + + def in_scope(self, scope_type: ScopeType) -> bool: + return self._scope().scope_type == scope_type + + def start_branch_statement(self) -> None: + assert len(self._scope().branch_stmts) > 0 + self._scope().branch_stmts.append( + BranchStatement(self._scope().branch_stmts[-1].branches[-1]) + ) + + def next_branch(self) -> None: + assert len(self._scope().branch_stmts) > 1 + self._scope().branch_stmts[-1].next_branch() + + def end_branch_statement(self) -> None: + assert len(self._scope().branch_stmts) > 1 + result = self._scope().branch_stmts.pop().done() + self._scope().branch_stmts[-1].record_nested_branch(result) + + def skip_branch(self) -> None: + # Only skip branch if we're outside of "root" branch statement. 
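# Quick sketch of the must/may merge implemented in BranchStatement.done() above
# (assumes this new module is importable as mypy.partially_defined):
from mypy.partially_defined import BranchState, BranchStatement

stmt = BranchStatement(BranchState())
stmt.record_definition("x")  # "if" branch assigns x and y
stmt.record_definition("y")
stmt.next_branch()
stmt.record_definition("y")  # "else" branch assigns only y
merged = stmt.done()
assert merged.must_be_defined == {"y"}  # defined on every path
assert merged.may_be_defined == {"x"}   # defined on only some paths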
+ if len(self._scope().branch_stmts) > 1 and not self.disable_branch_skip: + self._scope().branch_stmts[-1].skip_branch() + + def record_definition(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].record_definition(name) + + def delete_var(self, name: str) -> None: + assert len(self.scopes) > 0 + assert len(self.scopes[-1].branch_stmts) > 0 + self._scope().branch_stmts[-1].delete_var(name) + + def record_undefined_ref(self, o: NameExpr) -> None: + """Records an undefined reference. These can later be retrieved via `pop_undefined_ref`.""" + assert len(self.scopes) > 0 + self._scope().record_undefined_ref(o) + + def pop_undefined_ref(self, name: str) -> set[NameExpr]: + """If name has previously been reported as undefined, the NameExpr that was called will be returned.""" + assert len(self.scopes) > 0 + return self._scope().pop_undefined_ref(name) + + def is_possibly_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`. + return self._scope().branch_stmts[-1].is_possibly_undefined(name) + + def is_defined_in_different_branch(self, name: str) -> bool: + """This will return true if a variable is defined in a branch that's not the current branch.""" + assert len(self._scope().branch_stmts) > 0 + stmt = self._scope().branch_stmts[-1] + if not stmt.is_undefined(name): + return False + for stmt in self._scope().branch_stmts: + if stmt.is_defined_in_a_branch(name): + return True + return False + + def is_undefined(self, name: str) -> bool: + assert len(self._scope().branch_stmts) > 0 + return self._scope().branch_stmts[-1].is_undefined(name) + + +class Loop: + def __init__(self) -> None: + self.has_break = False + + +class PossiblyUndefinedVariableVisitor(ExtendedTraverserVisitor): + """Detects the following cases: + - A variable that's defined only part of the time. + - If a variable is used before definition + + An example of a partial definition: + if foo(): + x = 1 + print(x) # Error: "x" may be undefined. + + Example of a used before definition: + x = y + y: int = 2 + + Note that this code does not detect variables not defined in any of the branches -- that is + handled by the semantic analyzer. + """ + + def __init__( + self, + msg: MessageBuilder, + type_map: dict[Expression, Type], + options: Options, + names: SymbolTable, + ) -> None: + self.msg = msg + self.type_map = type_map + self.options = options + self.builtins = SymbolTable() + builtins_mod = names.get("__builtins__", None) + if builtins_mod: + assert isinstance(builtins_mod.node, MypyFile) + self.builtins = builtins_mod.node.names + self.loops: list[Loop] = [] + self.try_depth = 0 + self.tracker = DefinedVariableTracker() + for name in implicit_module_attrs: + self.tracker.record_definition(name) + + def var_used_before_def(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.USED_BEFORE_DEF): + self.msg.var_used_before_def(name, context) + + def variable_may_be_undefined(self, name: str, context: Context) -> None: + if self.msg.errors.is_error_code_enabled(errorcodes.POSSIBLY_UNDEFINED): + self.msg.variable_may_be_undefined(name, context) + + def process_definition(self, name: str) -> None: + # Was this name previously used? If yes, it's a used-before-definition error. + if not self.tracker.in_scope(ScopeType.Class): + # Errors in class scopes are caught by the semantic analyzer. 
+ refs = self.tracker.pop_undefined_ref(name) + for ref in refs: + if self.loops: + self.variable_may_be_undefined(name, ref) + else: + self.var_used_before_def(name, ref) + self.tracker.record_definition(name) + + def visit_global_decl(self, o: GlobalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_global_decl(o) + + def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: + for name in o.names: + self.process_definition(name) + super().visit_nonlocal_decl(o) + + def process_lvalue(self, lvalue: Lvalue | None) -> None: + if isinstance(lvalue, NameExpr): + self.process_definition(lvalue.name) + elif isinstance(lvalue, StarExpr): + self.process_lvalue(lvalue.expr) + elif isinstance(lvalue, (ListExpr, TupleExpr)): + for item in lvalue.items: + self.process_lvalue(item) + + def visit_assignment_stmt(self, o: AssignmentStmt) -> None: + for lvalue in o.lvalues: + self.process_lvalue(lvalue) + super().visit_assignment_stmt(o) + + def visit_assignment_expr(self, o: AssignmentExpr) -> None: + o.value.accept(self) + self.process_lvalue(o.target) + + def visit_if_stmt(self, o: IfStmt) -> None: + for e in o.expr: + e.accept(self) + self.tracker.start_branch_statement() + for b in o.body: + if b.is_unreachable: + continue + b.accept(self) + self.tracker.next_branch() + if o.else_body: + if not o.else_body.is_unreachable: + o.else_body.accept(self) + else: + self.tracker.skip_branch() + self.tracker.end_branch_statement() + + def visit_match_stmt(self, o: MatchStmt) -> None: + o.subject.accept(self) + self.tracker.start_branch_statement() + for i in range(len(o.patterns)): + pattern = o.patterns[i] + pattern.accept(self) + guard = o.guards[i] + if guard is not None: + guard.accept(self) + if not o.bodies[i].is_unreachable: + o.bodies[i].accept(self) + else: + self.tracker.skip_branch() + is_catchall = infer_pattern_value(pattern) == ALWAYS_TRUE + if not is_catchall: + self.tracker.next_branch() + self.tracker.end_branch_statement() + + def visit_func_def(self, o: FuncDef) -> None: + self.process_definition(o.name) + self.tracker.enter_scope(ScopeType.Func) + super().visit_func_def(o) + self.tracker.exit_scope() + + def visit_func(self, o: FuncItem) -> None: + if o.is_dynamic() and not self.options.check_untyped_defs: + return + if o.arguments is not None: + for arg in o.arguments: + self.tracker.record_definition(arg.variable.name) + super().visit_func(o) + + def visit_generator_expr(self, o: GeneratorExpr) -> None: + self.tracker.enter_scope(ScopeType.Generator) + for idx in o.indices: + self.process_lvalue(idx) + super().visit_generator_expr(o) + self.tracker.exit_scope() + + def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: + self.tracker.enter_scope(ScopeType.Generator) + for idx in o.indices: + self.process_lvalue(idx) + super().visit_dictionary_comprehension(o) + self.tracker.exit_scope() + + def visit_for_stmt(self, o: ForStmt) -> None: + o.expr.accept(self) + self.process_lvalue(o.index) + o.index.accept(self) + self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) + o.body.accept(self) + self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. 
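# Sketch of the for/else case the comment above describes (plain user code):
def loop_else_example(nums: list[int]) -> None:
    for n in nums:
        if n > 0:
            break
    else:
        x = 0    # runs only when the loop does not break
    print(x)     # flagged: "x" may be undefined when the loop breaks early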
+ has_break = loop.has_break + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() + + def visit_return_stmt(self, o: ReturnStmt) -> None: + super().visit_return_stmt(o) + self.tracker.skip_branch() + + def visit_lambda_expr(self, o: LambdaExpr) -> None: + self.tracker.enter_scope(ScopeType.Func) + super().visit_lambda_expr(o) + self.tracker.exit_scope() + + def visit_assert_stmt(self, o: AssertStmt) -> None: + super().visit_assert_stmt(o) + if checker.is_false_literal(o.expr): + self.tracker.skip_branch() + + def visit_raise_stmt(self, o: RaiseStmt) -> None: + super().visit_raise_stmt(o) + self.tracker.skip_branch() + + def visit_continue_stmt(self, o: ContinueStmt) -> None: + super().visit_continue_stmt(o) + self.tracker.skip_branch() + + def visit_break_stmt(self, o: BreakStmt) -> None: + super().visit_break_stmt(o) + if self.loops: + self.loops[-1].has_break = True + self.tracker.skip_branch() + + def visit_expression_stmt(self, o: ExpressionStmt) -> None: + if isinstance(self.type_map.get(o.expr, None), UninhabitedType): + self.tracker.skip_branch() + super().visit_expression_stmt(o) + + def visit_try_stmt(self, o: TryStmt) -> None: + """ + Note that finding undefined vars in `finally` requires different handling from + the rest of the code. In particular, we want to disallow skipping branches due to jump + statements in except/else clauses for finally but not for other cases. Imagine a case like: + def f() -> int: + try: + x = 1 + except: + # This jump statement needs to be handled differently depending on whether or + # not we're trying to process `finally` or not. + return 0 + finally: + # `x` may be undefined here. + pass + # `x` is always defined here. + return x + """ + self.try_depth += 1 + if o.finally_body is not None: + # In order to find undefined vars in `finally`, we need to + # process try/except with branch skipping disabled. However, for the rest of the code + # after finally, we need to process try/except with branch skipping enabled. + # Therefore, we need to process try/finally twice. + # Because processing is not idempotent, we should make a copy of the tracker. 
+ old_tracker = self.tracker.copy() + self.tracker.disable_branch_skip = True + self.process_try_stmt(o) + self.tracker = old_tracker + self.process_try_stmt(o) + self.try_depth -= 1 + + def process_try_stmt(self, o: TryStmt) -> None: + """ + Processes try statement decomposing it into the following: + if ...: + body + else_body + elif ...: + except 1 + elif ...: + except 2 + else: + except n + finally + """ + self.tracker.start_branch_statement() + o.body.accept(self) + if o.else_body is not None: + o.else_body.accept(self) + if len(o.handlers) > 0: + assert len(o.handlers) == len(o.vars) == len(o.types) + for i in range(len(o.handlers)): + self.tracker.next_branch() + exc_type = o.types[i] + if exc_type is not None: + exc_type.accept(self) + var = o.vars[i] + if var is not None: + self.process_definition(var.name) + var.accept(self) + o.handlers[i].accept(self) + if var is not None: + self.tracker.delete_var(var.name) + self.tracker.end_branch_statement() + + if o.finally_body is not None: + o.finally_body.accept(self) + + def visit_while_stmt(self, o: WhileStmt) -> None: + o.expr.accept(self) + self.tracker.start_branch_statement() + loop = Loop() + self.loops.append(loop) + o.body.accept(self) + has_break = loop.has_break + if not checker.is_true_literal(o.expr): + # If this is a loop like `while True`, we can consider the body to be + # a single branch statement (we're guaranteed that the body is executed at least once). + # If not, call next_branch() to make all variables defined there conditional. + self.tracker.next_branch() + self.tracker.end_branch_statement() + if o.else_body is not None: + # If the loop has a `break` inside, `else` is executed conditionally. + # If the loop doesn't have a `break` either the function will return or + # execute the `else`. + if has_break: + self.tracker.start_branch_statement() + self.tracker.next_branch() + if o.else_body: + o.else_body.accept(self) + if has_break: + self.tracker.end_branch_statement() + self.loops.pop() + + def visit_as_pattern(self, o: AsPattern) -> None: + if o.name is not None: + self.process_lvalue(o.name) + super().visit_as_pattern(o) + + def visit_starred_pattern(self, o: StarredPattern) -> None: + if o.capture is not None: + self.process_lvalue(o.capture) + super().visit_starred_pattern(o) + + def visit_name_expr(self, o: NameExpr) -> None: + if o.name in self.builtins: + return + if self.tracker.is_possibly_undefined(o.name): + # A variable is only defined in some branches. + self.variable_may_be_undefined(o.name, o) + # We don't want to report the error on the same variable multiple times. + self.tracker.record_definition(o.name) + elif self.tracker.is_defined_in_different_branch(o.name): + # A variable is defined in one branch but used in a different branch. + if self.loops or self.try_depth > 0: + # If we're in a loop or in a try, we can't be sure that this variable + # is undefined. Report it as "may be undefined". + self.variable_may_be_undefined(o.name, o) + else: + self.var_used_before_def(o.name, o) + elif self.tracker.is_undefined(o.name): + # A variable is undefined. It could be due to two things: + # 1. A variable is just totally undefined + # 2. The variable is defined later in the code. + # Case (1) will be caught by semantic analyzer. Case (2) is a forward ref that should + # be caught by this visitor. Save the ref for later, so that if we see a definition, + # we know it's a used-before-definition scenario. 
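# Sketch of the "defined later" case (2) described above (plain user code):
def forward_ref_example() -> None:
    print(y)  # used-before-def: the definition exists, but only later in this scope
    y = 2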
+ self.tracker.record_undefined_ref(o) + super().visit_name_expr(o) + + def visit_with_stmt(self, o: WithStmt) -> None: + for expr, idx in zip(o.expr, o.target): + expr.accept(self) + self.process_lvalue(idx) + o.body.accept(self) + + def visit_class_def(self, o: ClassDef) -> None: + self.process_definition(o.name) + self.tracker.enter_scope(ScopeType.Class) + super().visit_class_def(o) + self.tracker.exit_scope() + + def visit_import(self, o: Import) -> None: + for mod, alias in o.ids: + if alias is not None: + self.tracker.record_definition(alias) + else: + # When you do `import x.y`, only `x` becomes defined. + names = mod.split(".") + if len(names) > 0: + # `names` should always be nonempty, but we don't want mypy + # to crash on invalid code. + self.tracker.record_definition(names[0]) + super().visit_import(o) + + def visit_import_from(self, o: ImportFrom) -> None: + for mod, alias in o.names: + name = alias + if name is None: + name = mod + self.tracker.record_definition(name) + super().visit_import_from(o) diff --git a/mypy/plugin.py b/mypy/plugin.py index dc31130df991..cf124b45d04f 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -297,6 +297,10 @@ def parse_bool(self, expr: Expression) -> bool | None: """Parse True/False literals.""" raise NotImplementedError + @abstractmethod + def parse_str_literal(self, expr: Expression) -> str | None: + """Parse string literals.""" + @abstractmethod def fail( self, @@ -407,6 +411,10 @@ def final_iteration(self) -> bool: def is_stub_file(self) -> bool: raise NotImplementedError + @abstractmethod + def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: + raise NotImplementedError + # A context for querying for configuration data about a module for # cache invalidation purposes. diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index e180d435dc35..6fda965ade8b 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -3,9 +3,10 @@ from __future__ import annotations from typing import Iterable, List, cast -from typing_extensions import Final +from typing_extensions import Final, Literal import mypy.plugin # To avoid circular imports. +from mypy.errorcodes import LITERAL_REQ from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type from mypy.nodes import ( ARG_NAMED, @@ -75,7 +76,7 @@ SELF_TVAR_NAME: Final = "_AT" MAGIC_ATTR_NAME: Final = "__attrs_attrs__" -MAGIC_ATTR_CLS_NAME: Final = "_AttrsAttributes" # The namedtuple subclass name. +MAGIC_ATTR_CLS_NAME_TEMPLATE: Final = "__{}_AttrsAttributes__" # The tuple subclass pattern. class Converter: @@ -246,7 +247,11 @@ def _get_decorator_optional_bool_argument( return False if attr_value.fullname == "builtins.None": return None - ctx.api.fail(f'"{name}" argument must be True or False.', ctx.reason) + ctx.api.fail( + f'"{name}" argument must be a True, False, or None literal', + ctx.reason, + code=LITERAL_REQ, + ) return default return default else: @@ -257,7 +262,7 @@ def attr_tag_callback(ctx: mypy.plugin.ClassDefContext) -> None: """Record that we have an attrs class in the main semantic analysis pass. The later pass implemented by attr_class_maker_callback will use this - to detect attrs lasses in base classes. + to detect attrs classes in base classes. """ # The value is ignored, only the existence matters. 
ctx.cls.info.metadata["attrs_tag"] = {} @@ -324,8 +329,8 @@ def attr_class_maker_callback( } adder = MethodAdder(ctx) - if init: - _add_init(ctx, attributes, adder) + # If __init__ is not being generated, attrs still generates it as __attrs_init__ instead. + _add_init(ctx, attributes, adder, "__init__" if init else "__attrs_init__") if order: _add_order(ctx, adder) if frozen: @@ -736,7 +741,11 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) if attribute.name in ctx.cls.info.names: # This variable belongs to this class so we can modify it. node = ctx.cls.info.names[attribute.name].node - assert isinstance(node, Var) + if not isinstance(node, Var): + # The superclass attribute was overridden with a non-variable. + # No need to do anything here, override will be verified during + # type checking. + continue node.is_property = True else: # This variable belongs to a super class so create new Var so we @@ -749,13 +758,17 @@ def _make_frozen(ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute]) def _add_init( - ctx: mypy.plugin.ClassDefContext, attributes: list[Attribute], adder: MethodAdder + ctx: mypy.plugin.ClassDefContext, + attributes: list[Attribute], + adder: MethodAdder, + method_name: Literal["__init__", "__attrs_init__"], ) -> None: """Generate an __init__ method for the attributes and add it to the class.""" - # Convert attributes to arguments with kw_only arguments at the end of + # Convert attributes to arguments with kw_only arguments at the end of # the argument list pos_args = [] kw_only_args = [] + sym_table = ctx.cls.info.names for attribute in attributes: if not attribute.init: continue @@ -763,6 +776,13 @@ def _add_init( kw_only_args.append(attribute.argument(ctx)) else: pos_args.append(attribute.argument(ctx)) + + # If the attribute is Final, present in `__init__` and has + # no default, make sure it doesn't error later. 
+ if not attribute.has_default and attribute.name in sym_table: + sym_node = sym_table[attribute.name].node + if isinstance(sym_node, Var) and sym_node.is_final: + sym_node.final_set_in_init = True args = pos_args + kw_only_args if all( # We use getattr rather than instance checks because the variable.type @@ -777,7 +797,7 @@ def _add_init( for a in args: a.variable.type = AnyType(TypeOfAny.implementation_artifact) a.type_annotation = AnyType(TypeOfAny.implementation_artifact) - adder.add_method("__init__", args, NoneType()) + adder.add_method(method_name, args, NoneType()) def _add_attrs_magic_attribute( @@ -792,10 +812,11 @@ def _add_attrs_magic_attribute( "builtins.tuple", [ctx.api.named_type_or_none("attr.Attribute", [any_type]) or any_type] ) - ti = ctx.api.basic_new_typeinfo(MAGIC_ATTR_CLS_NAME, fallback_type, 0) - ti.is_named_tuple = True + attr_name = MAGIC_ATTR_CLS_NAME_TEMPLATE.format(ctx.cls.fullname.replace(".", "_")) + ti = ctx.api.basic_new_typeinfo(attr_name, fallback_type, 0) for (name, _), attr_type in zip(attrs, attributes_types): var = Var(name, attr_type) + var._fullname = name var.is_property = True proper_type = get_proper_type(attr_type) if isinstance(proper_type, Instance): @@ -803,14 +824,18 @@ def _add_attrs_magic_attribute( ti.names[name] = SymbolTableNode(MDEF, var, plugin_generated=True) attributes_type = Instance(ti, []) - # TODO: refactor using `add_attribute_to_class` - var = Var(name=MAGIC_ATTR_NAME, type=TupleType(attributes_types, fallback=attributes_type)) - var.info = ctx.cls.info - var.is_classvar = True - var._fullname = f"{ctx.cls.fullname}.{MAGIC_ATTR_CLS_NAME}" - var.allow_incompatible_override = True - ctx.cls.info.names[MAGIC_ATTR_NAME] = SymbolTableNode( - kind=MDEF, node=var, plugin_generated=True, no_serialize=True + # We need to stash the type of the magic attribute so it can be + # loaded on cached runs. 
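# Sketch of the runtime attribute this machinery models (assumes attrs is installed):
import attr

@attr.s
class C:
    x = attr.ib(type=int)

fields = C.__attrs_attrs__       # what MAGIC_ATTR_NAME refers to; the plugin types it per class
assert fields == attr.fields(C)  # attrs exposes the same tuple via attr.fields()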
+ ctx.cls.info.names[attr_name] = SymbolTableNode(MDEF, ti, plugin_generated=True) + + add_attribute_to_class( + ctx.api, + ctx.cls, + MAGIC_ATTR_NAME, + TupleType(attributes_types, fallback=attributes_type), + fullname=f"{ctx.cls.fullname}.{MAGIC_ATTR_NAME}", + override_allow_incompatible=True, + is_classvar=True, ) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index edcf8ea9a082..0acf3e3a6369 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -9,16 +9,22 @@ Block, CallExpr, ClassDef, + Decorator, Expression, FuncDef, JsonDict, + Node, PassStmt, RefExpr, SymbolTableNode, Var, ) from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface -from mypy.semanal import ALLOW_INCOMPATIBLE_OVERRIDE, set_callable_name +from mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, + require_bool_literal_argument, + set_callable_name, +) from mypy.typeops import ( # noqa: F401 # Part of public API try_getting_str_literals as try_getting_str_literals, ) @@ -26,6 +32,7 @@ CallableType, Overloaded, Type, + TypeType, TypeVarType, deserialize_type, get_proper_type, @@ -51,11 +58,7 @@ def _get_bool_argument(ctx: ClassDefContext, expr: CallExpr, name: str, default: """ attr_value = _get_argument(expr, name) if attr_value: - ret = ctx.api.parse_bool(attr_value) - if ret is None: - ctx.api.fail(f'"{name}" argument must be True or False.', expr) - return default - return ret + return require_bool_literal_argument(ctx.api, attr_value, name, default) return default @@ -66,19 +69,7 @@ def _get_argument(call: CallExpr, name: str) -> Expression | None: # # Note: I'm not hard-coding the index so that in the future we can support other # attrib and class makers. - if not isinstance(call.callee, RefExpr): - return None - - callee_type = None - callee_node = call.callee.node - if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: - callee_node_type = get_proper_type(callee_node.type) - if isinstance(callee_node_type, Overloaded): - # We take the last overload. - callee_type = callee_node_type.items[-1] - elif isinstance(callee_node_type, CallableType): - callee_type = callee_node_type - + callee_type = _get_callee_type(call) if not callee_type: return None @@ -92,6 +83,31 @@ def _get_argument(call: CallExpr, name: str) -> Expression | None: return attr_value if attr_name == argument.name: return attr_value + + return None + + +def _get_callee_type(call: CallExpr) -> CallableType | None: + """Return the type of the callee, regardless of its syntatic form.""" + + callee_node: Node | None = call.callee + + if isinstance(callee_node, RefExpr): + callee_node = callee_node.node + + # Some decorators may be using typing.dataclass_transform, which is itself a decorator, so we + # need to unwrap them to get at the true callee + if isinstance(callee_node, Decorator): + callee_node = callee_node.func + + if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: + callee_node_type = get_proper_type(callee_node.type) + if isinstance(callee_node_type, Overloaded): + # We take the last overload. + return callee_node_type.items[-1] + elif isinstance(callee_node_type, CallableType): + return callee_node_type + return None @@ -102,6 +118,8 @@ def add_method( return_type: Type, self_type: Type | None = None, tvar_def: TypeVarType | None = None, + is_classmethod: bool = False, + is_staticmethod: bool = False, ) -> None: """ Adds a new method to a class. 
@@ -115,6 +133,8 @@ def add_method( return_type=return_type, self_type=self_type, tvar_def=tvar_def, + is_classmethod=is_classmethod, + is_staticmethod=is_staticmethod, ) @@ -126,8 +146,15 @@ def add_method_to_class( return_type: Type, self_type: Type | None = None, tvar_def: TypeVarType | None = None, + is_classmethod: bool = False, + is_staticmethod: bool = False, ) -> None: """Adds a new method to a class definition.""" + + assert not ( + is_classmethod is True and is_staticmethod is True + ), "Can't add a new method that's both staticmethod and classmethod." + info = cls.info # First remove any previously generated methods with the same name @@ -137,13 +164,21 @@ def add_method_to_class( if sym.plugin_generated and isinstance(sym.node, FuncDef): cls.defs.body.remove(sym.node) - self_type = self_type or fill_typevars(info) if isinstance(api, SemanticAnalyzerPluginInterface): function_type = api.named_type("builtins.function") else: function_type = api.named_generic_type("builtins.function", []) - args = [Argument(Var("self"), self_type, None, ARG_POS)] + args + if is_classmethod: + self_type = self_type or TypeType(fill_typevars(info)) + first = [Argument(Var("_cls"), self_type, None, ARG_POS, True)] + elif is_staticmethod: + first = [] + else: + self_type = self_type or fill_typevars(info) + first = [Argument(Var("self"), self_type, None, ARG_POS)] + args = first + args + arg_types, arg_names, arg_kinds = [], [], [] for arg in args: assert arg.type_annotation, "All arguments must be fully typed." @@ -158,6 +193,8 @@ def add_method_to_class( func = FuncDef(name, args, Block([PassStmt()])) func.info = info func.type = set_callable_name(signature, func) + func.is_class = is_classmethod + func.is_static = is_staticmethod func._fullname = info.fullname + "." + name func.line = info.line @@ -168,7 +205,21 @@ def add_method_to_class( r_name = get_unique_redefinition_name(name, info.names) info.names[r_name] = info.names[name] - info.names[name] = SymbolTableNode(MDEF, func, plugin_generated=True) + # Add decorator for is_staticmethod. It's unnecessary for is_classmethod. + if is_staticmethod: + func.is_decorated = True + v = Var(name, func.type) + v.info = info + v._fullname = func._fullname + v.is_staticmethod = True + dec = Decorator(func, [], v) + dec.line = info.line + sym = SymbolTableNode(MDEF, dec) + else: + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + info.names[name] = sym + info.defn.defs.body.append(func) @@ -180,6 +231,8 @@ def add_attribute_to_class( final: bool = False, no_serialize: bool = False, override_allow_incompatible: bool = False, + fullname: str | None = None, + is_classvar: bool = False, ) -> None: """ Adds a new attribute to a class definition. @@ -197,11 +250,17 @@ def add_attribute_to_class( node = Var(name, typ) node.info = info node.is_final = final + node.is_classvar = is_classvar if name in ALLOW_INCOMPATIBLE_OVERRIDE: node.allow_incompatible_override = True else: node.allow_incompatible_override = override_allow_incompatible - node._fullname = info.fullname + "." + name + + if fullname: + node._fullname = fullname + else: + node._fullname = info.fullname + "." 
+ name + info.names[name] = SymbolTableNode( MDEF, node, plugin_generated=True, no_serialize=no_serialize ) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 095967dc3fa1..7694134ac09e 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -2,8 +2,11 @@ from __future__ import annotations +from typing import Optional from typing_extensions import Final +from mypy import errorcodes, message_registry +from mypy.expandtype import expand_type from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -15,12 +18,16 @@ Argument, AssignmentStmt, CallExpr, + ClassDef, Context, + DataclassTransformSpec, Expression, JsonDict, NameExpr, + Node, PlaceholderNode, RefExpr, + Statement, SymbolTableNode, TempNode, TypeAlias, @@ -32,9 +39,10 @@ from mypy.plugins.common import ( _get_decorator_bool_argument, add_attribute_to_class, - add_method, + add_method_to_class, deserialize_and_fixup_type, ) +from mypy.semanal_shared import find_dataclass_transform_spec, require_bool_literal_argument from mypy.server.trigger import make_wildcard_trigger from mypy.state import state from mypy.typeops import map_type_from_supertype @@ -50,20 +58,27 @@ TypeVarType, get_proper_type, ) +from mypy.typevars import fill_typevars # The set of decorators that generate dataclasses. dataclass_makers: Final = {"dataclass", "dataclasses.dataclass"} -# The set of functions that generate dataclass fields. -field_makers: Final = {"dataclasses.field"} SELF_TVAR_NAME: Final = "_DT" +_TRANSFORM_SPEC_FOR_DATACLASSES = DataclassTransformSpec( + eq_default=True, + order_default=False, + kw_only_default=False, + frozen_default=False, + field_specifiers=("dataclasses.Field", "dataclasses.field"), +) class DataclassAttribute: def __init__( self, name: str, + alias: str | None, is_in_init: bool, is_init_var: bool, has_default: bool, @@ -74,6 +89,7 @@ def __init__( kw_only: bool, ) -> None: self.name = name + self.alias = alias self.is_in_init = is_in_init self.is_init_var = is_init_var self.has_default = has_default @@ -83,7 +99,7 @@ def __init__( self.info = info self.kw_only = kw_only - def to_argument(self) -> Argument: + def to_argument(self, current_info: TypeInfo) -> Argument: arg_kind = ARG_POS if self.kw_only and self.has_default: arg_kind = ARG_NAMED_OPT @@ -92,16 +108,29 @@ def to_argument(self) -> Argument: elif not self.kw_only and self.has_default: arg_kind = ARG_OPT return Argument( - variable=self.to_var(), type_annotation=self.type, initializer=None, kind=arg_kind + variable=self.to_var(current_info), + type_annotation=self.expand_type(current_info), + initializer=None, + kind=arg_kind, ) - def to_var(self) -> Var: - return Var(self.name, self.type) + def expand_type(self, current_info: TypeInfo) -> Optional[Type]: + if self.type is not None and self.info.self_type is not None: + # In general, it is not safe to call `expand_type()` during semantic analyzis, + # however this plugin is called very late, so all types should be fully ready. + # Also, it is tricky to avoid eager expansion of Self types here (e.g. because + # we serialize attributes). 
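# Sketch of the Self-typed attribute case the expansion above handles (assumes
# typing_extensions.Self; the class names are illustrative only):
from dataclasses import dataclass
from typing import Optional
from typing_extensions import Self

@dataclass
class Node:
    parent: Optional[Self] = None  # in a subclass, Self must become that subclass in __init__

@dataclass
class Leaf(Node):
    pass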
+ return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)}) + return self.type + + def to_var(self, current_info: TypeInfo) -> Var: + return Var(self.alias or self.name, self.expand_type(current_info)) def serialize(self) -> JsonDict: assert self.type return { "name": self.name, + "alias": self.alias, "is_in_init": self.is_in_init, "is_init_var": self.is_init_var, "has_default": self.has_default, @@ -138,16 +167,26 @@ class DataclassTransformer: there are no placeholders. """ - def __init__(self, ctx: ClassDefContext) -> None: - self._ctx = ctx + def __init__( + self, + cls: ClassDef, + # Statement must also be accepted since class definition itself may be passed as the reason + # for subclass/metaclass-based uses of `typing.dataclass_transform` + reason: Expression | Statement, + spec: DataclassTransformSpec, + api: SemanticAnalyzerPluginInterface, + ) -> None: + self._cls = cls + self._reason = reason + self._spec = spec + self._api = api def transform(self) -> bool: """Apply all the necessary transformations to the underlying dataclass so as to ensure it is fully type checked according to the rules in PEP 557. """ - ctx = self._ctx - info = self._ctx.cls.info + info = self._cls.info attributes = self.collect_attributes() if attributes is None: # Some definitions are not ready. We need another pass. @@ -156,14 +195,14 @@ def transform(self) -> bool: if attr.type is None: return False decorator_arguments = { - "init": _get_decorator_bool_argument(self._ctx, "init", True), - "eq": _get_decorator_bool_argument(self._ctx, "eq", True), - "order": _get_decorator_bool_argument(self._ctx, "order", False), - "frozen": _get_decorator_bool_argument(self._ctx, "frozen", False), - "slots": _get_decorator_bool_argument(self._ctx, "slots", False), - "match_args": _get_decorator_bool_argument(self._ctx, "match_args", True), + "init": self._get_bool_arg("init", True), + "eq": self._get_bool_arg("eq", self._spec.eq_default), + "order": self._get_bool_arg("order", self._spec.order_default), + "frozen": self._get_bool_arg("frozen", self._spec.frozen_default), + "slots": self._get_bool_arg("slots", False), + "match_args": self._get_bool_arg("match_args", True), } - py_version = self._ctx.api.options.python_version + py_version = self._api.options.python_version # If there are no attributes, it may be that the semantic analyzer has not # processed them yet. In order to work around this, we can simply skip generating @@ -175,11 +214,12 @@ def transform(self) -> bool: and attributes ): - args = [ - attr.to_argument() - for attr in attributes - if attr.is_in_init and not self._is_kw_only_type(attr.type) - ] + with state.strict_optional_set(self._api.options.strict_optional): + args = [ + attr.to_argument(info) + for attr in attributes + if attr.is_in_init and not self._is_kw_only_type(attr.type) + ] if info.fallback_to_any: # Make positional args optional since we don't know their order. @@ -196,7 +236,9 @@ def transform(self) -> bool: Argument(nameless_var, AnyType(TypeOfAny.explicit), None, ARG_STAR2), ] - add_method(ctx, "__init__", args=args, return_type=NoneType()) + add_method_to_class( + self._api, self._cls, "__init__", args=args, return_type=NoneType() + ) if ( decorator_arguments["eq"] @@ -204,7 +246,7 @@ def transform(self) -> bool: or decorator_arguments["order"] ): # Type variable for self types in generated methods. 
- obj_type = ctx.api.named_type("builtins.object") + obj_type = self._api.named_type("builtins.object") self_tvar_expr = TypeVarExpr( SELF_TVAR_NAME, info.fullname + "." + SELF_TVAR_NAME, [], obj_type ) @@ -213,16 +255,16 @@ def transform(self) -> bool: # Add <, >, <=, >=, but only if the class has an eq method. if decorator_arguments["order"]: if not decorator_arguments["eq"]: - ctx.api.fail("eq must be True if order is True", ctx.cls) + self._api.fail('"eq" must be True if "order" is True', self._reason) for method_name in ["__lt__", "__gt__", "__le__", "__ge__"]: # Like for __eq__ and __ne__, we want "other" to match # the self type. - obj_type = ctx.api.named_type("builtins.object") + obj_type = self._api.named_type("builtins.object") order_tvar_def = TypeVarType( SELF_TVAR_NAME, info.fullname + "." + SELF_TVAR_NAME, -1, [], obj_type ) - order_return_type = ctx.api.named_type("builtins.bool") + order_return_type = self._api.named_type("builtins.bool") order_args = [ Argument(Var("other", order_tvar_def), order_tvar_def, None, ARG_POS) ] @@ -230,13 +272,14 @@ def transform(self) -> bool: existing_method = info.get(method_name) if existing_method is not None and not existing_method.plugin_generated: assert existing_method.node - ctx.api.fail( - f"You may not have a custom {method_name} method when order=True", + self._api.fail( + f'You may not have a custom "{method_name}" method when "order" is True', existing_method.node, ) - add_method( - ctx, + add_method_to_class( + self._api, + self._cls, method_name, args=order_args, return_type=order_return_type, @@ -244,10 +287,20 @@ def transform(self) -> bool: tvar_def=order_tvar_def, ) + parent_decorator_arguments = [] + for parent in info.mro[1:-1]: + parent_args = parent.metadata.get("dataclass") + if parent_args: + parent_decorator_arguments.append(parent_args) + if decorator_arguments["frozen"]: + if any(not parent["frozen"] for parent in parent_decorator_arguments): + self._api.fail("Cannot inherit frozen dataclass from a non-frozen one", info) self._propertize_callables(attributes, settable=False) self._freeze(attributes) else: + if any(parent["frozen"] for parent in parent_decorator_arguments): + self._api.fail("Cannot inherit non-frozen dataclass from a frozen one", info) self._propertize_callables(attributes) if decorator_arguments["slots"]: @@ -263,12 +316,12 @@ def transform(self) -> bool: and attributes and py_version >= (3, 10) ): - str_type = ctx.api.named_type("builtins.str") + str_type = self._api.named_type("builtins.str") literals: list[Type] = [ LiteralType(attr.name, str_type) for attr in attributes if attr.is_in_init ] - match_args_type = TupleType(literals, ctx.api.named_type("builtins.tuple")) - add_attribute_to_class(ctx.api, ctx.cls, "__match_args__", match_args_type) + match_args_type = TupleType(literals, self._api.named_type("builtins.tuple")) + add_attribute_to_class(self._api, self._cls, "__match_args__", match_args_type) self._add_dataclass_fields_magic_attribute() @@ -285,10 +338,10 @@ def add_slots( if not correct_version: # This means that version is lower than `3.10`, # it is just a non-existent argument for `dataclass` function. - self._ctx.api.fail( + self._api.fail( 'Keyword argument "slots" for "dataclass" ' "is only valid in Python 3.10 and higher", - self._ctx.reason, + self._reason, ) return @@ -300,11 +353,11 @@ def add_slots( # Class explicitly specifies a different `__slots__` field. # And `@dataclass(slots=True)` is used. # In runtime this raises a type error. 
- self._ctx.api.fail( + self._api.fail( '"{}" both defines "__slots__" and is used with "slots=True"'.format( - self._ctx.cls.name + self._cls.name ), - self._ctx.cls, + self._cls, ) return @@ -340,12 +393,51 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: Return None if some dataclass base class hasn't been processed yet and thus we'll need to ask for another pass. """ - # First, collect attributes belonging to the current class. - ctx = self._ctx - cls = self._ctx.cls - attrs: list[DataclassAttribute] = [] - known_attrs: set[str] = set() - kw_only = _get_decorator_bool_argument(ctx, "kw_only", False) + cls = self._cls + + # First, collect attributes belonging to any class in the MRO, ignoring duplicates. + # + # We iterate through the MRO in reverse because attrs defined in the parent must appear + # earlier in the attributes list than attrs defined in the child. See: + # https://docs.python.org/3/library/dataclasses.html#inheritance + # + # However, we also want attributes defined in the subtype to override ones defined + # in the parent. We can implement this via a dict without disrupting the attr order + # because dicts preserve insertion order in Python 3.7+. + found_attrs: dict[str, DataclassAttribute] = {} + found_dataclass_supertype = False + for info in reversed(cls.info.mro[1:-1]): + if "dataclass_tag" in info.metadata and "dataclass" not in info.metadata: + # We haven't processed the base class yet. Need another pass. + return None + if "dataclass" not in info.metadata: + continue + + # Each class depends on the set of attributes in its dataclass ancestors. + self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + found_dataclass_supertype = True + + for data in info.metadata["dataclass"]["attributes"]: + name: str = data["name"] + + attr = DataclassAttribute.deserialize(info, data, self._api) + # TODO: We shouldn't be performing type operations during the main + # semantic analysis pass, since some TypeInfo attributes might + # still be in flux. This should be performed in a later phase. + with state.strict_optional_set(self._api.options.strict_optional): + attr.expand_typevar_from_subtype(cls.info) + found_attrs[name] = attr + + sym_node = cls.info.names.get(name) + if sym_node and sym_node.node and not isinstance(sym_node.node, Var): + self._api.fail( + "Dataclass attribute may only be overridden by another attribute", + sym_node.node, + ) + + # Second, collect attributes belonging to the current class. + current_attr_names: set[str] = set() + kw_only = self._get_bool_arg("kw_only", self._spec.kw_only_default) for stmt in cls.defs.body: # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. @@ -367,8 +459,8 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: assert not isinstance(node, PlaceholderNode) if isinstance(node, TypeAlias): - ctx.api.fail( - ("Type aliases inside dataclass definitions " "are not supported at runtime"), + self._api.fail( + ("Type aliases inside dataclass definitions are not supported at runtime"), node, ) # Skip processing this node. 
This doesn't match the runtime behaviour, @@ -395,19 +487,24 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: if self._is_kw_only_type(node_type): kw_only = True - has_field_call, field_args = _collect_field_args(stmt.rvalue, ctx) + has_field_call, field_args = self._collect_field_args(stmt.rvalue) is_in_init_param = field_args.get("init") if is_in_init_param is None: is_in_init = True else: - is_in_init = bool(ctx.api.parse_bool(is_in_init_param)) + is_in_init = bool(self._api.parse_bool(is_in_init_param)) has_default = False # Ensure that something like x: int = field() is rejected # after an attribute with a default. if has_field_call: - has_default = "default" in field_args or "default_factory" in field_args + has_default = ( + "default" in field_args + or "default_factory" in field_args + # alias for default_factory defined in PEP 681 + or "factory" in field_args + ) # All other assignments are already type checked. elif not isinstance(stmt.rvalue, TempNode): @@ -423,68 +520,60 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # kw_only value from the decorator parameter. field_kw_only_param = field_args.get("kw_only") if field_kw_only_param is not None: - is_kw_only = bool(ctx.api.parse_bool(field_kw_only_param)) - - known_attrs.add(lhs.name) - attrs.append( - DataclassAttribute( - name=lhs.name, - is_in_init=is_in_init, - is_init_var=is_init_var, - has_default=has_default, - line=stmt.line, - column=stmt.column, - type=sym.type, - info=cls.info, - kw_only=is_kw_only, - ) - ) - - # Next, collect attributes belonging to any class in the MRO - # as long as those attributes weren't already collected. This - # makes it possible to overwrite attributes in subclasses. - # copy() because we potentially modify all_attrs below and if this code requires debugging - # we'll have unmodified attrs laying around. - all_attrs = attrs.copy() - for info in cls.info.mro[1:-1]: - if "dataclass_tag" in info.metadata and "dataclass" not in info.metadata: - # We haven't processed the base class yet. Need another pass. - return None - if "dataclass" not in info.metadata: - continue + value = self._api.parse_bool(field_kw_only_param) + if value is not None: + is_kw_only = value + else: + self._api.fail('"kw_only" argument must be a boolean literal', stmt.rvalue) + + if sym.type is None and node.is_final and node.is_inferred: + # This is a special case, assignment like x: Final = 42 is classified + # annotated above, but mypy strips the `Final` turning it into x = 42. + # We do not support inferred types in dataclasses, so we can try inferring + # type for simple literals, and otherwise require an explicit type + # argument for Final[...]. + typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) + if typ: + node.type = typ + else: + self._api.fail( + "Need type argument for Final[...] with non-literal default in dataclass", + stmt, + ) + node.type = AnyType(TypeOfAny.from_error) + + alias = None + if "alias" in field_args: + alias = self._api.parse_str_literal(field_args["alias"]) + if alias is None: + self._api.fail( + message_registry.DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL, + stmt.rvalue, + code=errorcodes.LITERAL_REQ, + ) - super_attrs = [] - # Each class depends on the set of attributes in its dataclass ancestors. 
- ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + current_attr_names.add(lhs.name) + found_attrs[lhs.name] = DataclassAttribute( + name=lhs.name, + alias=alias, + is_in_init=is_in_init, + is_init_var=is_init_var, + has_default=has_default, + line=stmt.line, + column=stmt.column, + type=sym.type, + info=cls.info, + kw_only=is_kw_only, + ) - for data in info.metadata["dataclass"]["attributes"]: - name: str = data["name"] - if name not in known_attrs: - attr = DataclassAttribute.deserialize(info, data, ctx.api) - # TODO: We shouldn't be performing type operations during the main - # semantic analysis pass, since some TypeInfo attributes might - # still be in flux. This should be performed in a later phase. - with state.strict_optional_set(ctx.api.options.strict_optional): - attr.expand_typevar_from_subtype(ctx.cls.info) - known_attrs.add(name) - super_attrs.append(attr) - elif all_attrs: - # How early in the attribute list an attribute appears is determined by the - # reverse MRO, not simply MRO. - # See https://docs.python.org/3/library/dataclasses.html#inheritance for - # details. - for attr in all_attrs: - if attr.name == name: - all_attrs.remove(attr) - super_attrs.append(attr) - break - all_attrs = super_attrs + all_attrs + all_attrs = list(found_attrs.values()) + if found_dataclass_supertype: all_attrs.sort(key=lambda a: a.kw_only) - # Ensure that arguments without a default don't follow - # arguments that have a default. + # Third, ensure that arguments without a default don't follow + # arguments that have a default and that the KW_ONLY sentinel + # is only provided once. found_default = False - # Ensure that the KW_ONLY sentinel is only provided once found_kw_sentinel = False for attr in all_attrs: # If we find any attribute that is_in_init, not kw_only, and that @@ -493,32 +582,37 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: if found_default and attr.is_in_init and not attr.has_default and not attr.kw_only: # If the issue comes from merging different classes, report it # at the class definition point. - context = Context(line=attr.line, column=attr.column) if attr in attrs else ctx.cls - ctx.api.fail( + context: Context = cls + if attr.name in current_attr_names: + context = Context(line=attr.line, column=attr.column) + self._api.fail( "Attributes without a default cannot follow attributes with one", context ) found_default = found_default or (attr.has_default and attr.is_in_init) if found_kw_sentinel and self._is_kw_only_type(attr.type): - context = Context(line=attr.line, column=attr.column) if attr in attrs else ctx.cls - ctx.api.fail("There may not be more than one field with the KW_ONLY type", context) + context = cls + if attr.name in current_attr_names: + context = Context(line=attr.line, column=attr.column) + self._api.fail( + "There may not be more than one field with the KW_ONLY type", context + ) found_kw_sentinel = found_kw_sentinel or self._is_kw_only_type(attr.type) - return all_attrs def _freeze(self, attributes: list[DataclassAttribute]) -> None: """Converts all attributes to @property methods in order to emulate frozen classes. """ - info = self._ctx.cls.info + info = self._cls.info for attr in attributes: sym_node = info.names.get(attr.name) if sym_node is not None: var = sym_node.node - assert isinstance(var, Var) - var.is_property = True + if isinstance(var, Var): + var.is_property = True else: - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var._fullname = info.fullname + "." 
+ var.name @@ -534,10 +628,10 @@ def _propertize_callables( `self` argument (it is not). """ - info = self._ctx.cls.info + info = self._cls.info for attr in attributes: if isinstance(get_proper_type(attr.type), CallableType): - var = attr.to_var() + var = attr.to_var(info) var.info = info var.is_property = True var.is_settable_property = settable @@ -556,17 +650,81 @@ def _is_kw_only_type(self, node: Type | None) -> bool: def _add_dataclass_fields_magic_attribute(self) -> None: attr_name = "__dataclass_fields__" any_type = AnyType(TypeOfAny.explicit) - field_type = self._ctx.api.named_type_or_none("dataclasses.Field", [any_type]) or any_type - attr_type = self._ctx.api.named_type( - "builtins.dict", [self._ctx.api.named_type("builtins.str"), field_type] + # For `dataclasses`, use the type `dict[str, Field[Any]]` for accuracy. For dataclass + # transforms, it's inaccurate to use `Field` since a given transform may use a completely + # different type (or none); fall back to `Any` there. + # + # In either case, we're aiming to match the Typeshed stub for `is_dataclass`, which expects + # the instance to have a `__dataclass_fields__` attribute of type `dict[str, Field[Any]]`. + if self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: + field_type = self._api.named_type_or_none("dataclasses.Field", [any_type]) or any_type + else: + field_type = any_type + attr_type = self._api.named_type( + "builtins.dict", [self._api.named_type("builtins.str"), field_type] ) var = Var(name=attr_name, type=attr_type) - var.info = self._ctx.cls.info - var._fullname = self._ctx.cls.info.fullname + "." + attr_name - self._ctx.cls.info.names[attr_name] = SymbolTableNode( + var.info = self._cls.info + var._fullname = self._cls.info.fullname + "." + attr_name + var.is_classvar = True + self._cls.info.names[attr_name] = SymbolTableNode( kind=MDEF, node=var, plugin_generated=True ) + def _collect_field_args(self, expr: Expression) -> tuple[bool, dict[str, Expression]]: + """Returns a tuple where the first value represents whether or not + the expression is a call to dataclass.field and the second is a + dictionary of the keyword arguments that field() was called with. + """ + if ( + isinstance(expr, CallExpr) + and isinstance(expr.callee, RefExpr) + and expr.callee.fullname in self._spec.field_specifiers + ): + # field() only takes keyword arguments. + args = {} + for name, arg, kind in zip(expr.arg_names, expr.args, expr.arg_kinds): + if not kind.is_named(): + if kind.is_named(star=True): + # This means that `field` is used with `**` unpacking, + # the best we can do for now is not to fail. + # TODO: we can infer what's inside `**` and try to collect it. + message = 'Unpacking **kwargs in "field()" is not supported' + elif self._spec is not _TRANSFORM_SPEC_FOR_DATACLASSES: + # dataclasses.field can only be used with keyword args, but this + # restriction is only enforced for the *standardized* arguments to + # dataclass_transform field specifiers. If this is not a + # dataclasses.dataclass class, we can just skip positional args safely. 
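+                        # (PEP 681 only standardizes keyword parameters such as "init",
+                        # "default" and "alias"; a custom field specifier may accept
+                        # positional arguments whose meaning mypy cannot know.)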
+ continue + else: + message = '"field()" does not accept positional arguments' + self._api.fail(message, expr) + return True, {} + assert name is not None + args[name] = arg + return True, args + return False, {} + + def _get_bool_arg(self, name: str, default: bool) -> bool: + # Expressions are always CallExprs (either directly or via a wrapper like Decorator), so + # we can use the helpers from common + if isinstance(self._reason, Expression): + return _get_decorator_bool_argument( + ClassDefContext(self._cls, self._reason, self._api), name, default + ) + + # Subclass/metaclass use of `typing.dataclass_transform` reads the parameters from the + # class's keyword arguments (ie `class Subclass(Parent, kwarg1=..., kwarg2=...)`) + expression = self._cls.keywords.get(name) + if expression is not None: + return require_bool_literal_argument(self._api, expression, name, default) + return default + + +def add_dataclass_tag(info: TypeInfo) -> None: + # The value is ignored, only the existence matters. + info.metadata["dataclass_tag"] = {} + def dataclass_tag_callback(ctx: ClassDefContext) -> None: """Record that we have a dataclass in the main semantic analysis pass. @@ -574,42 +732,40 @@ def dataclass_tag_callback(ctx: ClassDefContext) -> None: The later pass implemented by DataclassTransformer will use this to detect dataclasses in base classes. """ - # The value is ignored, only the existence matters. - ctx.cls.info.metadata["dataclass_tag"] = {} + add_dataclass_tag(ctx.cls.info) def dataclass_class_maker_callback(ctx: ClassDefContext) -> bool: """Hooks into the class typechecking process to add support for dataclasses.""" - transformer = DataclassTransformer(ctx) + transformer = DataclassTransformer( + ctx.cls, ctx.reason, _get_transform_spec(ctx.reason), ctx.api + ) return transformer.transform() -def _collect_field_args( - expr: Expression, ctx: ClassDefContext -) -> tuple[bool, dict[str, Expression]]: - """Returns a tuple where the first value represents whether or not - the expression is a call to dataclass.field and the second is a - dictionary of the keyword arguments that field() was called with. +def _get_transform_spec(reason: Expression) -> DataclassTransformSpec: + """Find the relevant transform parameters from the decorator/parent class/metaclass that + triggered the dataclasses plugin. + + Although the resulting DataclassTransformSpec is based on the typing.dataclass_transform + function, we also use it for traditional dataclasses.dataclass classes as well for simplicity. + In those cases, we return a default spec rather than one based on a call to + `typing.dataclass_transform`. """ - if ( - isinstance(expr, CallExpr) - and isinstance(expr.callee, RefExpr) - and expr.callee.fullname in field_makers - ): - # field() only takes keyword arguments. - args = {} - for name, arg, kind in zip(expr.arg_names, expr.args, expr.arg_kinds): - if not kind.is_named(): - if kind.is_named(star=True): - # This means that `field` is used with `**` unpacking, - # the best we can do for now is not to fail. - # TODO: we can infer what's inside `**` and try to collect it. 
- message = 'Unpacking **kwargs in "field()" is not supported' - else: - message = '"field()" does not accept positional arguments' - ctx.api.fail(message, expr) - return True, {} - assert name is not None - args[name] = arg - return True, args - return False, {} + if _is_dataclasses_decorator(reason): + return _TRANSFORM_SPEC_FOR_DATACLASSES + + spec = find_dataclass_transform_spec(reason) + assert spec is not None, ( + "trying to find dataclass transform spec, but reason is neither dataclasses.dataclass nor " + "decorated with typing.dataclass_transform" + ) + return spec + + +def _is_dataclasses_decorator(node: Node) -> bool: + if isinstance(node, CallExpr): + node = node.callee + if isinstance(node, RefExpr): + return node.fullname in dataclass_makers + return False diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index 5ec37230b5ed..4d6f46860939 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -3,6 +3,7 @@ from functools import partial from typing import Callable +import mypy.errorcodes as codes from mypy import message_registry from mypy.nodes import DictExpr, IntExpr, StrExpr, UnaryExpr from mypy.plugin import ( @@ -39,9 +40,7 @@ class DefaultPlugin(Plugin): def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: from mypy.plugins import ctypes, singledispatch - if fullname in ("contextlib.contextmanager", "contextlib.asynccontextmanager"): - return contextmanager_callback - elif fullname == "ctypes.Array": + if fullname == "ctypes.Array": return ctypes.array_constructor_callback elif fullname == "functools.singledispatch": return singledispatch.create_singledispatch_function_callback @@ -148,25 +147,6 @@ def get_class_decorator_hook_2( return None -def contextmanager_callback(ctx: FunctionContext) -> Type: - """Infer a better return type for 'contextlib.contextmanager'.""" - # Be defensive, just in case. - if ctx.arg_types and len(ctx.arg_types[0]) == 1: - arg_type = get_proper_type(ctx.arg_types[0][0]) - default_return = get_proper_type(ctx.default_return_type) - if isinstance(arg_type, CallableType) and isinstance(default_return, CallableType): - # The stub signature doesn't preserve information about arguments so - # add them back here. - return default_return.copy_modified( - arg_types=arg_type.arg_types, - arg_kinds=arg_type.arg_kinds, - arg_names=arg_type.arg_names, - variables=arg_type.variables, - is_ellipsis_args=arg_type.is_ellipsis_args, - ) - return ctx.default_return_type - - def typed_dict_get_signature_callback(ctx: MethodSigContext) -> CallableType: """Try to infer a better signature type for TypedDict.get. 
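# The sketch below is not taken from this patch (field, create_model, ModelBase and
# Model are hypothetical names); it only illustrates the PEP 681 forms the rewritten
# dataclasses plugin has to recognize: a dataclass_transform-decorated decorator or
# base class (a metaclass works the same way). For the base-class form, the `reason`
# passed to DataclassTransformer is the class definition itself, and per-class options
# such as `frozen=True` are read from the class keywords via _get_bool_arg.
from __future__ import annotations

from typing import Any, TypeVar
from typing_extensions import dataclass_transform  # typing.dataclass_transform on 3.11+

_T = TypeVar("_T")

def field(*, default: Any = None, init: bool = True, alias: str | None = None) -> Any: ...

@dataclass_transform(field_specifiers=(field,))
def create_model(cls: type[_T]) -> type[_T]:
    # Decorator form: for a class decorated with @create_model, `reason` is the
    # decorator expression.
    return cls

@dataclass_transform(field_specifiers=(field,))
class ModelBase:
    # Base-class form: every subclass is transformed; keyword arguments in the class
    # header (e.g. frozen=True below) configure the synthesized methods.
    def __init_subclass__(cls, *, frozen: bool = False) -> None: ...

class Model(ModelBase, frozen=True):
    x: int = field(default=0, alias="x_alias")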
@@ -285,7 +265,11 @@ def typed_dict_pop_callback(ctx: MethodContext) -> Type: ): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) value_types = [] @@ -340,7 +324,11 @@ def typed_dict_setdefault_callback(ctx: MethodContext) -> Type: ): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) default_type = ctx.arg_types[1][0] @@ -378,7 +366,11 @@ def typed_dict_delitem_callback(ctx: MethodContext) -> Type: ): keys = try_getting_str_literals(ctx.args[0][0], ctx.arg_types[0][0]) if keys is None: - ctx.api.fail(message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, ctx.context) + ctx.api.fail( + message_registry.TYPEDDICT_KEY_MUST_BE_STRING_LITERAL, + ctx.context, + code=codes.LITERAL_REQ, + ) return AnyType(TypeOfAny.from_error) for key in keys: diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index 75b301252f06..1acf42d11ee6 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -19,7 +19,7 @@ from mypy.nodes import TypeInfo from mypy.semanal_enum import ENUM_BASES from mypy.subtypes import is_equivalent -from mypy.typeops import make_simplified_union +from mypy.typeops import fixup_partial_type, make_simplified_union from mypy.types import CallableType, Instance, LiteralType, ProperType, Type, get_proper_type ENUM_NAME_ACCESS: Final = {f"{prefix}.name" for prefix in ENUM_BASES} | { @@ -77,6 +77,7 @@ def _infer_value_type_with_auto_fallback( """ if proper_type is None: return None + proper_type = get_proper_type(fixup_partial_type(proper_type)) if not (isinstance(proper_type, Instance) and proper_type.type.fullname == "enum.auto"): return proper_type assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." 
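# A short sketch (not part of the patch; Movie and `key` are hypothetical names) of the
# user-facing effect of the typed_dict_*_callback changes above: the existing
# "TypedDict key must be a string literal" failures for .pop()/.setdefault()/del are now
# tagged with the literal-required error code, so they can be silenced selectively with
# --disable-error-code or `# type: ignore[literal-required]`.
from typing_extensions import TypedDict

class Movie(TypedDict, total=False):
    title: str
    year: int

def drop_key(movie: Movie, key: str) -> None:
    movie.pop("year")  # OK: the key is a string literal
    movie.pop(key)  # error: TypedDict key must be a string literal  [literal-required]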
diff --git a/mypy/plugins/singledispatch.py b/mypy/plugins/singledispatch.py index e6009e64f789..cd6a3a9fa1cc 100644 --- a/mypy/plugins/singledispatch.py +++ b/mypy/plugins/singledispatch.py @@ -40,7 +40,7 @@ class RegisterCallableInfo(NamedTuple): def get_singledispatch_info(typ: Instance) -> SingledispatchTypeVars | None: if len(typ.args) == 2: - return SingledispatchTypeVars(*typ.args) # type: ignore + return SingledispatchTypeVars(*typ.args) # type: ignore[arg-type] return None @@ -200,7 +200,7 @@ def call_singledispatch_function_after_register_argument(ctx: MethodContext) -> """Called on the function after passing a type to register""" register_callable = ctx.type if isinstance(register_callable, Instance): - type_args = RegisterCallableInfo(*register_callable.args) # type: ignore + type_args = RegisterCallableInfo(*register_callable.args) # type: ignore[arg-type] func = get_first_arg(ctx.arg_types) if func is not None: register_function(ctx, type_args.singledispatch_obj, func, type_args.register_type) diff --git a/mypy/reachability.py b/mypy/reachability.py index c4611a13d1af..8602fc645e2b 100644 --- a/mypy/reachability.py +++ b/mypy/reachability.py @@ -13,6 +13,7 @@ CallExpr, ComparisonExpr, Expression, + FuncDef, IfStmt, Import, ImportAll, @@ -274,7 +275,7 @@ def fixed_comparison(left: Targ, op: str, right: Targ) -> int: return TRUTH_VALUE_UNKNOWN -def contains_int_or_tuple_of_ints(expr: Expression) -> None | int | tuple[int] | tuple[int, ...]: +def contains_int_or_tuple_of_ints(expr: Expression) -> None | int | tuple[int, ...]: if isinstance(expr, IntExpr): return expr.value if isinstance(expr, TupleExpr): @@ -357,3 +358,6 @@ def visit_import_from(self, node: ImportFrom) -> None: def visit_import_all(self, node: ImportAll) -> None: node.is_mypy_only = True + + def visit_func_def(self, node: FuncDef) -> None: + node.is_mypy_only = True diff --git a/mypy/report.py b/mypy/report.py index 183d0390e2c9..75c372200ca3 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -10,7 +10,6 @@ import sys import time import tokenize -import typing from abc import ABCMeta, abstractmethod from operator import attrgetter from typing import Any, Callable, Dict, Iterator, Tuple, cast @@ -26,7 +25,7 @@ from mypy.version import __version__ try: - from lxml import etree # type: ignore + from lxml import etree # type: ignore[import] LXML_INSTALLED = True except ImportError: @@ -141,8 +140,12 @@ def should_skip_path(path: str) -> bool: def iterate_python_lines(path: str) -> Iterator[tuple[int, str]]: """Return an iterator over (line number, line text) from a Python file.""" - with tokenize.open(path) as input_file: - yield from enumerate(input_file, 1) + try: + with tokenize.open(path) as input_file: + yield from enumerate(input_file, 1) + except IsADirectoryError: + # can happen with namespace packages + pass class FuncCounterVisitor(TraverserVisitor): @@ -211,7 +214,7 @@ class AnyExpressionsReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) self.counts: dict[str, tuple[int, int]] = {} - self.any_types_counter: dict[str, typing.Counter[int]] = {} + self.any_types_counter: dict[str, collections.Counter[int]] = {} def on_file( self, @@ -286,7 +289,7 @@ def _report_any_exprs(self) -> None: self._write_out_report("any-exprs.txt", column_names, rows, total_row) def _report_types_of_anys(self) -> None: - total_counter: typing.Counter[int] = collections.Counter() + total_counter: collections.Counter[int] = collections.Counter() for 
counter in self.any_types_counter.values(): for any_type, value in counter.items(): total_counter[any_type] += value @@ -350,7 +353,7 @@ def indentation_level(self, line_number: int) -> int | None: return None def visit_func_def(self, defn: FuncDef) -> None: - start_line = defn.get_line() - 1 + start_line = defn.line - 1 start_indent = None # When a function is decorated, sometimes the start line will point to # whitespace or comments between the decorator and the function, so @@ -373,7 +376,7 @@ def visit_func_def(self, defn: FuncDef) -> None: if cur_indent is None: # Consume the line, but don't mark it as belonging to the function yet. cur_line += 1 - elif start_indent is not None and cur_indent > start_indent: + elif cur_indent > start_indent: # A non-blank line that belongs to the function. cur_line += 1 end_line = cur_line @@ -528,7 +531,7 @@ def on_file( def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str: if lineno in visitor.any_line_map: result = "Any Types on this line: " - counter: typing.Counter[int] = collections.Counter() + counter: collections.Counter[int] = collections.Counter() for typ in visitor.any_line_map[lineno]: counter[typ.type_of_any] += 1 for any_type, occurrences in counter.items(): @@ -634,51 +637,48 @@ def on_file( etree.SubElement(class_element, "methods") lines_element = etree.SubElement(class_element, "lines") - with tokenize.open(path) as input_file: - class_lines_covered = 0 - class_total_lines = 0 - for lineno, _ in enumerate(input_file, 1): - status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) - hits = 0 - branch = False - if status == stats.TYPE_EMPTY: - continue - class_total_lines += 1 - if status != stats.TYPE_ANY: - class_lines_covered += 1 - hits = 1 - if status == stats.TYPE_IMPRECISE: - branch = True - file_info.counts[status] += 1 - line_element = etree.SubElement( - lines_element, - "line", - branch=str(branch).lower(), - hits=str(hits), - number=str(lineno), - precision=stats.precision_names[status], - ) - if branch: - line_element.attrib["condition-coverage"] = "50% (1/2)" - class_element.attrib["branch-rate"] = "0" - class_element.attrib["line-rate"] = get_line_rate( - class_lines_covered, class_total_lines + class_lines_covered = 0 + class_total_lines = 0 + for lineno, _ in iterate_python_lines(path): + status = visitor.line_map.get(lineno, stats.TYPE_EMPTY) + hits = 0 + branch = False + if status == stats.TYPE_EMPTY: + continue + class_total_lines += 1 + if status != stats.TYPE_ANY: + class_lines_covered += 1 + hits = 1 + if status == stats.TYPE_IMPRECISE: + branch = True + file_info.counts[status] += 1 + line_element = etree.SubElement( + lines_element, + "line", + branch=str(branch).lower(), + hits=str(hits), + number=str(lineno), + precision=stats.precision_names[status], ) - # parent_module is set to whichever module contains this file. For most files, we want - # to simply strip the last element off of the module. But for __init__.py files, - # the module == the parent module. 
- parent_module = file_info.module.rsplit(".", 1)[0] - if file_info.name.endswith("__init__.py"): - parent_module = file_info.module - - if parent_module not in self.root_package.packages: - self.root_package.packages[parent_module] = CoberturaPackage(parent_module) - current_package = self.root_package.packages[parent_module] - packages_to_update = [self.root_package, current_package] - for package in packages_to_update: - package.total_lines += class_total_lines - package.covered_lines += class_lines_covered - current_package.classes[class_name] = class_element + if branch: + line_element.attrib["condition-coverage"] = "50% (1/2)" + class_element.attrib["branch-rate"] = "0" + class_element.attrib["line-rate"] = get_line_rate(class_lines_covered, class_total_lines) + # parent_module is set to whichever module contains this file. For most files, we want + # to simply strip the last element off of the module. But for __init__.py files, + # the module == the parent module. + parent_module = file_info.module.rsplit(".", 1)[0] + if file_info.name.endswith("__init__.py"): + parent_module = file_info.module + + if parent_module not in self.root_package.packages: + self.root_package.packages[parent_module] = CoberturaPackage(parent_module) + current_package = self.root_package.packages[parent_module] + packages_to_update = [self.root_package, current_package] + for package in packages_to_update: + package.total_lines += class_total_lines + package.covered_lines += class_lines_covered + current_package.classes[class_name] = class_element def on_finish(self) -> None: self.root.attrib["line-rate"] = get_line_rate( diff --git a/mypy/semanal.py b/mypy/semanal.py index 08456c9ad845..d2fd92499679 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -51,10 +51,11 @@ from __future__ import annotations from contextlib import contextmanager -from typing import Any, Callable, Iterable, Iterator, List, Optional, Set, TypeVar, cast +from typing import Any, Callable, Collection, Iterable, Iterator, List, TypeVar, cast from typing_extensions import Final, TypeAlias as _TypeAlias from mypy import errorcodes as codes, message_registry +from mypy.constant_fold import constant_fold_expr from mypy.errorcodes import ErrorCode from mypy.errors import Errors, report_internal_error from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type @@ -69,6 +70,8 @@ from mypy.nodes import ( ARG_NAMED, ARG_POS, + ARG_STAR, + ARG_STAR2, CONTRAVARIANT, COVARIANT, GDEF, @@ -77,6 +80,7 @@ IS_ABSTRACT, LDEF, MDEF, + NOT_ABSTRACT, REVEAL_LOCALS, REVEAL_TYPE, RUNTIME_PROTOCOL_DECOS, @@ -88,7 +92,6 @@ AwaitExpr, Block, BreakStmt, - BytesExpr, CallExpr, CastExpr, ClassDef, @@ -96,6 +99,7 @@ ConditionalExpr, Context, ContinueStmt, + DataclassTransformSpec, Decorator, DelStmt, DictExpr, @@ -105,7 +109,6 @@ Expression, ExpressionStmt, FakeExpression, - FloatExpr, ForStmt, FuncBase, FuncDef, @@ -118,7 +121,6 @@ ImportBase, ImportFrom, IndexExpr, - IntExpr, LambdaExpr, ListComprehension, ListExpr, @@ -177,7 +179,7 @@ type_aliases_source_versions, typing_extensions_aliases, ) -from mypy.options import Options +from mypy.options import TYPE_VAR_TUPLE, Options from mypy.patterns import ( AsPattern, ClassPattern, @@ -193,6 +195,7 @@ Plugin, SemanticAnalyzerPluginInterface, ) +from mypy.plugins import dataclasses as dataclasses_plugin from mypy.reachability import ( ALWAYS_FALSE, ALWAYS_TRUE, @@ -207,30 +210,36 @@ from mypy.semanal_namedtuple import NamedTupleAnalyzer from mypy.semanal_newtype import NewTypeAnalyzer from 
mypy.semanal_shared import ( + ALLOW_INCOMPATIBLE_OVERRIDE, PRIORITY_FALLBACKS, SemanticAnalyzerInterface, calculate_tuple_fallback, + find_dataclass_transform_spec, has_placeholder, + require_bool_literal_argument, set_callable_name as set_callable_name, ) from mypy.semanal_typeddict import TypedDictAnalyzer from mypy.tvar_scope import TypeVarLikeScope from mypy.typeanal import ( + SELF_TYPE_NAMES, TypeAnalyser, TypeVarLikeList, TypeVarLikeQuery, analyze_type_alias, check_for_explicit_any, detect_diverging_alias, + find_self_type, fix_instance_types, has_any_from_unimported_type, no_subscript_builtin_alias, remove_dups, type_constructors, ) -from mypy.typeops import function_type, get_type_vars +from mypy.typeops import function_type, get_type_vars, try_getting_str_literals_from_type from mypy.types import ( ASSERT_TYPE_NAMES, + DATACLASS_TRANSFORM_NAMES, FINAL_DECORATOR_NAMES, FINAL_TYPE_NAMES, NEVER_NAMES, @@ -245,14 +254,12 @@ FunctionLike, Instance, LiteralType, - LiteralValue, NoneType, Overloaded, Parameters, ParamSpecType, PlaceholderType, ProperType, - StarType, TrivialSyntheticTypeTranslator, TupleType, Type, @@ -263,10 +270,12 @@ TypeVarLikeType, TypeVarType, UnboundType, + UnpackType, get_proper_type, get_proper_types, invalid_recursive_alias, is_named_instance, + store_argument_type, ) from mypy.typevars import fill_typevars from mypy.util import ( @@ -300,10 +309,6 @@ # available very early on. CORE_BUILTIN_CLASSES: Final = ["object", "bool", "function"] -# Subclasses can override these Var attributes with incompatible types. This can also be -# set for individual attributes using 'allow_incompatible_override' of Var. -ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") - # Used for tracking incomplete references Tag: _TypeAlias = int @@ -335,7 +340,7 @@ class SemanticAnalyzer( # Nested block depths of scopes block_depth: list[int] # TypeInfo of directly enclosing class (or None) - type: TypeInfo | None = None + _type: TypeInfo | None = None # Stack of outer classes (the second tuple item contains tvars). type_stack: list[TypeInfo | None] # Type variables bound by the current scope, be it class or function @@ -414,7 +419,7 @@ def __init__( FuncItem | GeneratorExpr | DictionaryComprehension, SymbolTable ] = {} self.imports = set() - self.type = None + self._type = None self.type_stack = [] # Are the namespaces of classes being processed complete? self.incomplete_type_stack: list[bool] = [] @@ -452,8 +457,17 @@ def __init__( # rvalues while temporarily setting this to True. self.basic_type_applications = False + # Used to temporarily enable unbound type variables in some contexts. Namely, + # in base class expressions, and in right hand sides of type aliases. Do not add + # new uses of this, as this may cause leaking `UnboundType`s to type checking. 
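+        # See the allow_unbound_tvars_set() context manager below, which temporarily
+        # enables this and restores the previous value on exit.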
+ self.allow_unbound_tvars = False + # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties + @property + def type(self) -> TypeInfo | None: + return self._type + @property def is_stub_file(self) -> bool: return self._is_stub_file @@ -466,6 +480,15 @@ def is_typeshed_stub_file(self) -> bool: def final_iteration(self) -> bool: return self._final_iteration + @contextmanager + def allow_unbound_tvars_set(self) -> Iterator[None]: + old = self.allow_unbound_tvars + self.allow_unbound_tvars = True + try: + yield + finally: + self.allow_unbound_tvars = old + # # Preparing module (performed before semantic analysis) # @@ -593,27 +616,34 @@ def refresh_top_level(self, file_node: MypyFile) -> None: def add_implicit_module_attrs(self, file_node: MypyFile) -> None: """Manually add implicit definitions of module '__name__' etc.""" + str_type: Type | None = self.named_type_or_none("builtins.str") + if str_type is None: + str_type = UnboundType("builtins.str") for name, t in implicit_module_attrs.items(): if name == "__doc__": - typ: Type = UnboundType("__builtins__.str") + typ: Type = str_type elif name == "__path__": if not file_node.is_package_init_file(): continue # Need to construct the type ourselves, to avoid issues with __builtins__.list # not being subscriptable or typing.List not getting bound - sym = self.lookup_qualified("__builtins__.list", Context()) - if not sym: - continue - node = sym.node - assert isinstance(node, TypeInfo) - typ = Instance(node, [self.str_type()]) + inst = self.named_type_or_none("builtins.list", [str_type]) + if inst is None: + assert not self.final_iteration, "Cannot find builtins.list to add __path__" + self.defer() + return + typ = inst elif name == "__annotations__": - sym = self.lookup_qualified("__builtins__.dict", Context(), suppress_errors=True) - if not sym: - continue - node = sym.node - assert isinstance(node, TypeInfo) - typ = Instance(node, [self.str_type(), AnyType(TypeOfAny.special_form)]) + inst = self.named_type_or_none( + "builtins.dict", [str_type, AnyType(TypeOfAny.special_form)] + ) + if inst is None: + assert ( + not self.final_iteration + ), "Cannot find builtins.dict to add __annotations__" + self.defer() + return + typ = inst else: assert t is not None, f"type should be specified for {name}" typ = UnboundType(t) @@ -731,12 +761,14 @@ def file_context( """ scope = self.scope self.options = options - self.errors.set_file(file_node.path, file_node.fullname, scope=scope) + self.errors.set_file(file_node.path, file_node.fullname, scope=scope, options=options) self.cur_mod_node = file_node self.cur_mod_id = file_node.fullname with scope.module_scope(self.cur_mod_id): self._is_stub_file = file_node.path.lower().endswith(".pyi") - self._is_typeshed_stub_file = is_typeshed_file(file_node.path) + self._is_typeshed_stub_file = is_typeshed_file( + options.abs_custom_typeshed_dir, file_node.path + ) self.globals = file_node.names self.tvar_scope = TypeVarLikeScope() @@ -761,7 +793,7 @@ def file_context( if active_type: scope.leave_class() self.leave_class() - self.type = None + self._type = None self.incomplete_type_stack.pop() del self.options @@ -802,7 +834,10 @@ def analyze_func_def(self, defn: FuncDef) -> None: if defn.type: assert isinstance(defn.type, CallableType) - self.update_function_type_variables(defn.type, defn) + has_self_type = self.update_function_type_variables(defn.type, defn) + else: + has_self_type = False + self.function_stack.pop() if self.is_class_scope(): @@ -813,7 
+848,7 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(defn.type, CallableType) if isinstance(get_proper_type(defn.type.ret_type), AnyType): defn.type = defn.type.copy_modified(ret_type=NoneType()) - self.prepare_method_signature(defn, self.type) + self.prepare_method_signature(defn, self.type, has_self_type) # Analyze function signature with self.tvar_scope_frame(self.tvar_scope.method_frame()): @@ -830,9 +865,30 @@ def analyze_func_def(self, defn: FuncDef) -> None: self.defer(defn) return assert isinstance(result, ProperType) + if isinstance(result, CallableType): + # type guards need to have a positional argument, to spec + if ( + result.type_guard + and ARG_POS not in result.arg_kinds[self.is_class_scope() :] + and not defn.is_static + ): + self.fail( + "TypeGuard functions must have a positional argument", + result, + code=codes.VALID_TYPE, + ) + # in this case, we just kind of just ... remove the type guard. + result = result.copy_modified(type_guard=None) + + result = self.remove_unpack_kwargs(defn, result) + if has_self_type and self.type is not None: + info = self.type + if info.self_type is not None: + result.variables = [info.self_type] + list(result.variables) defn.type = result self.add_type_alias_deps(analyzer.aliases_used) self.check_function_signature(defn) + self.check_paramspec_definition(defn) if isinstance(defn, FuncDef): assert isinstance(defn.type, CallableType) defn.type = set_callable_name(defn.type, defn) @@ -852,6 +908,12 @@ def analyze_func_def(self, defn: FuncDef) -> None: and is_trivial_body(defn.body) ): defn.abstract_status = IMPLICITLY_ABSTRACT + if ( + is_trivial_body(defn.body) + and not self.is_stub_file + and defn.abstract_status != NOT_ABSTRACT + ): + defn.is_trivial_body = True if ( defn.is_coroutine @@ -872,7 +934,30 @@ def analyze_func_def(self, defn: FuncDef) -> None: defn.type = defn.type.copy_modified(ret_type=ret_type) self.wrapped_coro_return_types[defn] = defn.type - def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: + def remove_unpack_kwargs(self, defn: FuncDef, typ: CallableType) -> CallableType: + if not typ.arg_kinds or typ.arg_kinds[-1] is not ArgKind.ARG_STAR2: + return typ + last_type = get_proper_type(typ.arg_types[-1]) + if not isinstance(last_type, UnpackType): + return typ + last_type = get_proper_type(last_type.type) + if not isinstance(last_type, TypedDictType): + self.fail("Unpack item in ** argument must be a TypedDict", defn) + new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)] + return typ.copy_modified(arg_types=new_arg_types) + overlap = set(typ.arg_names) & set(last_type.items) + # It is OK for TypedDict to have a key named 'kwargs'. + overlap.discard(typ.arg_names[-1]) + if overlap: + overlapped = ", ".join([f'"{name}"' for name in overlap]) + self.fail(f"Overlap between argument names and ** TypedDict items: {overlapped}", defn) + new_arg_types = typ.arg_types[:-1] + [AnyType(TypeOfAny.from_error)] + return typ.copy_modified(arg_types=new_arg_types) + # OK, everything looks right now, mark the callable type as using unpack. + new_arg_types = typ.arg_types[:-1] + [last_type] + return typ.copy_modified(arg_types=new_arg_types, unpack_kwargs=True) + + def prepare_method_signature(self, func: FuncDef, info: TypeInfo, has_self_type: bool) -> None: """Check basic signature validity and tweak annotation of self/cls argument.""" # Only non-static methods are special. 
functype = func.type @@ -880,14 +965,58 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: if func.name in ["__init_subclass__", "__class_getitem__"]: func.is_class = True if not func.arguments: - self.fail("Method must have at least one argument", func) + self.fail( + 'Method must have at least one argument. Did you forget the "self" argument?', + func, + ) elif isinstance(functype, CallableType): self_type = get_proper_type(functype.arg_types[0]) if isinstance(self_type, AnyType): - leading_type: Type = fill_typevars(info) + if has_self_type: + assert self.type is not None and self.type.self_type is not None + leading_type: Type = self.type.self_type + else: + leading_type = fill_typevars(info) if func.is_class or func.name == "__new__": leading_type = self.class_type(leading_type) func.type = replace_implicit_first_type(functype, leading_type) + elif has_self_type and isinstance(func.unanalyzed_type, CallableType): + if not isinstance(get_proper_type(func.unanalyzed_type.arg_types[0]), AnyType): + if self.is_expected_self_type( + self_type, func.is_class or func.name == "__new__" + ): + # This error is off by default, since it is explicitly allowed + # by the PEP 673. + self.fail( + 'Redundant "Self" annotation for the first method argument', + func, + code=codes.REDUNDANT_SELF_TYPE, + ) + else: + self.fail( + "Method cannot have explicit self annotation and Self type", func + ) + elif has_self_type: + self.fail("Static methods cannot use Self type", func) + + def is_expected_self_type(self, typ: Type, is_classmethod: bool) -> bool: + """Does this (analyzed or not) type represent the expected Self type for a method?""" + assert self.type is not None + typ = get_proper_type(typ) + if is_classmethod: + if isinstance(typ, TypeType): + return self.is_expected_self_type(typ.item, is_classmethod=False) + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + if sym is not None and sym.fullname == "typing.Type" and typ.args: + return self.is_expected_self_type(typ.args[0], is_classmethod=False) + return False + if isinstance(typ, TypeVarType): + return typ == self.type.self_type + if isinstance(typ, UnboundType): + sym = self.lookup_qualified(typ.name, typ, suppress_errors=True) + return sym is not None and sym.fullname in SELF_TYPE_NAMES + return False def set_original_def(self, previous: Node | None, new: FuncDef | Decorator) -> bool: """If 'new' conditionally redefine 'previous', set 'previous' as original @@ -912,15 +1041,41 @@ def f(): ... # Error: 'f' redefined else: return False - def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> None: + def update_function_type_variables(self, fun_type: CallableType, defn: FuncItem) -> bool: """Make any type variables in the signature of defn explicit. Update the signature of defn to contain type variable definitions - if defn is generic. + if defn is generic. Return True, if the signature contains typing.Self + type, or False otherwise. """ with self.tvar_scope_frame(self.tvar_scope.method_frame()): a = self.type_analyzer() - fun_type.variables = a.bind_function_type_variables(fun_type, defn) + fun_type.variables, has_self_type = a.bind_function_type_variables(fun_type, defn) + if has_self_type and self.type is not None: + self.setup_self_type() + return has_self_type + + def setup_self_type(self) -> None: + """Setup a (shared) Self type variable for current class. 
+ + We intentionally don't add it to the class symbol table, + so it can be accessed only by mypy and will not cause + clashes with user defined names. + """ + assert self.type is not None + info = self.type + if info.self_type is not None: + if has_placeholder(info.self_type.upper_bound): + # Similar to regular (user defined) type variables. + self.process_placeholder( + None, + "Self upper bound", + info, + force_progress=info.self_type.upper_bound != fill_typevars(info), + ) + else: + return + info.self_type = TypeVarType("Self", f"{info.fullname}.Self", 0, [], fill_typevars(info)) def visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: self.statement = defn @@ -1006,6 +1161,8 @@ def process_overload_impl(self, defn: OverloadedFuncDef) -> None: assert self.type is not None if self.type.is_protocol: impl.abstract_status = IMPLICITLY_ABSTRACT + if impl.abstract_status != NOT_ABSTRACT: + impl.is_trivial_body = True def analyze_overload_sigs_and_impl( self, defn: OverloadedFuncDef @@ -1069,7 +1226,7 @@ def handle_missing_overload_decorators( ) else: self.fail( - "The implementation for an overloaded function " "must come last", + "The implementation for an overloaded function must come last", defn.items[idx], ) else: @@ -1093,6 +1250,7 @@ def handle_missing_overload_implementation(self, defn: OverloadedFuncDef) -> Non else: item.abstract_status = IS_ABSTRACT else: + # TODO: also allow omitting an implementation for abstract methods in ABCs? self.fail( "An overloaded function outside a stub file must have an implementation", defn, @@ -1199,7 +1357,10 @@ def analyze_function_body(self, defn: FuncItem) -> None: # Bind the type variables again to visit the body. if defn.type: a = self.type_analyzer() - a.bind_function_type_variables(cast(CallableType, defn.type), defn) + typ = cast(CallableType, defn.type) + a.bind_function_type_variables(typ, defn) + for i in range(len(typ.arg_types)): + store_argument_type(defn, i, typ, self.named_type) self.function_stack.append(defn) with self.enter(defn): for arg in defn.arguments: @@ -1215,8 +1376,9 @@ def analyze_function_body(self, defn: FuncItem) -> None: self.function_stack.pop() def check_classvar_in_signature(self, typ: ProperType) -> None: + t: ProperType if isinstance(typ, Overloaded): - for t in typ.items: # type: ProperType + for t in typ.items: self.check_classvar_in_signature(t) return if not isinstance(typ, CallableType): @@ -1239,6 +1401,64 @@ def check_function_signature(self, fdef: FuncItem) -> None: elif len(sig.arg_types) > len(fdef.arguments): self.fail("Type signature has too many arguments", fdef, blocker=True) + def check_paramspec_definition(self, defn: FuncDef) -> None: + func = defn.type + assert isinstance(func, CallableType) + + if not any(isinstance(var, ParamSpecType) for var in func.variables): + return # Function does not have param spec variables + + args = func.var_arg() + kwargs = func.kw_arg() + if args is None and kwargs is None: + return # Looks like this function does not have starred args + + args_defn_type = None + kwargs_defn_type = None + for arg_def, arg_kind in zip(defn.arguments, defn.arg_kinds): + if arg_kind == ARG_STAR: + args_defn_type = arg_def.type_annotation + elif arg_kind == ARG_STAR2: + kwargs_defn_type = arg_def.type_annotation + + # This may happen on invalid `ParamSpec` args / kwargs definition, + # type analyzer sets types of arguments to `Any`, but keeps + # definition types as `UnboundType` for now. 
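+        # To detect the `P.args` / `P.kwargs` convention we therefore look at the raw
+        # (unanalyzed) annotations and match on their ".args" / ".kwargs" name suffixes.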
+ if not ( + (isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args")) + or ( + isinstance(kwargs_defn_type, UnboundType) + and kwargs_defn_type.name.endswith(".kwargs") + ) + ): + # Looks like both `*args` and `**kwargs` are not `ParamSpec` + # It might be something else, skipping. + return + + args_type = args.typ if args is not None else None + kwargs_type = kwargs.typ if kwargs is not None else None + + if ( + not isinstance(args_type, ParamSpecType) + or not isinstance(kwargs_type, ParamSpecType) + or args_type.name != kwargs_type.name + ): + if isinstance(args_defn_type, UnboundType) and args_defn_type.name.endswith(".args"): + param_name = args_defn_type.name.split(".")[0] + elif isinstance(kwargs_defn_type, UnboundType) and kwargs_defn_type.name.endswith( + ".kwargs" + ): + param_name = kwargs_defn_type.name.split(".")[0] + else: + # Fallback for cases that probably should not ever happen: + param_name = "P" + + self.fail( + f'ParamSpec must have "*args" typed as "{param_name}.args" and "**kwargs" typed as "{param_name}.kwargs"', + func, + code=codes.VALID_TYPE, + ) + def visit_decorator(self, dec: Decorator) -> None: self.statement = dec # TODO: better don't modify them at all. @@ -1273,7 +1493,13 @@ def visit_decorator(self, dec: Decorator) -> None: dec.var.is_classmethod = True self.check_decorated_function_is_method("classmethod", dec) elif refers_to_fullname( - d, ("builtins.property", "abc.abstractproperty", "functools.cached_property") + d, + ( + "builtins.property", + "abc.abstractproperty", + "functools.cached_property", + "enum.property", + ), ): removed.append(i) dec.func.is_property = True @@ -1297,6 +1523,10 @@ def visit_decorator(self, dec: Decorator) -> None: removed.append(i) else: self.fail("@final cannot be used with non-method functions", d) + elif isinstance(d, CallExpr) and refers_to_fullname( + d.callee, DATACLASS_TRANSFORM_NAMES + ): + dec.func.dataclass_transform_spec = self.parse_dataclass_transform_spec(d) elif not dec.var.is_property: # We have seen a "non-trivial" decorator before seeing @property, if # we will see a @property later, give an error, as we don't support this. @@ -1347,7 +1577,7 @@ def analyze_class(self, defn: ClassDef) -> None: defn.base_type_exprs.extend(defn.removed_base_type_exprs) defn.removed_base_type_exprs.clear() - self.update_metaclass(defn) + self.infer_metaclass_and_bases_from_compat_helpers(defn) bases = defn.base_type_exprs bases, tvar_defs, is_protocol = self.clean_up_bases_and_infer_type_variables( @@ -1363,20 +1593,27 @@ def analyze_class(self, defn: ClassDef) -> None: self.defer() self.analyze_class_keywords(defn) - result = self.analyze_base_classes(bases) - - if result is None or self.found_incomplete_ref(tag): + bases_result = self.analyze_base_classes(bases) + if bases_result is None or self.found_incomplete_ref(tag): # Something was incomplete. Defer current target. self.mark_incomplete(defn.name, defn) return - base_types, base_error = result + base_types, base_error = bases_result if any(isinstance(base, PlaceholderType) for base, _ in base_types): # We need to know the TypeInfo of each base to construct the MRO. Placeholder types # are okay in nested positions, since they can't affect the MRO. self.mark_incomplete(defn.name, defn) return + declared_metaclass, should_defer, any_meta = self.get_declared_metaclass( + defn.name, defn.metaclass + ) + if should_defer or self.found_incomplete_ref(tag): + # Metaclass was not ready. Defer current target. 
+ self.mark_incomplete(defn.name, defn) + return + if self.analyze_typeddict_classdef(defn): if defn.info: self.setup_type_vars(defn, tvar_defs) @@ -1391,11 +1628,13 @@ def analyze_class(self, defn: ClassDef) -> None: self.setup_type_vars(defn, tvar_defs) if base_error: defn.info.fallback_to_any = True + if any_meta: + defn.info.meta_fallback_to_any = True with self.scope.class_scope(defn.info): self.configure_base_classes(defn, base_types) defn.info.is_protocol = is_protocol - self.analyze_metaclass(defn) + self.recalculate_metaclass(defn, declared_metaclass) defn.info.runtime_protocol = False for decorator in defn.decorators: self.analyze_class_decorator(defn, decorator) @@ -1409,7 +1648,7 @@ def setup_type_vars(self, defn: ClassDef, tvar_defs: list[TypeVarLikeType]) -> N def setup_alias_type_vars(self, defn: ClassDef) -> None: assert defn.info.special_alias is not None - defn.info.special_alias.alias_tvars = list(defn.info.type_vars) + defn.info.special_alias.alias_tvars = list(defn.type_vars) target = defn.info.special_alias.target assert isinstance(target, ProperType) if isinstance(target, TypedDictType): @@ -1442,8 +1681,8 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> bool: for decorator in defn.decorators: decorator.accept(self) if isinstance(decorator, RefExpr): - if decorator.fullname in FINAL_DECORATOR_NAMES: - self.fail("@final cannot be used with TypedDict", decorator) + if decorator.fullname in FINAL_DECORATOR_NAMES and info is not None: + info.is_final = True if info is None: self.mark_incomplete(defn.name, defn) else: @@ -1463,7 +1702,8 @@ def analyze_namedtuple_classdef( ): # Don't reprocess everything. We just need to process methods defined # in the named tuple class body. - is_named_tuple, info = True, defn.info # type: bool, Optional[TypeInfo] + is_named_tuple = True + info: TypeInfo | None = defn.info else: is_named_tuple, info = self.named_tuple_analyzer.analyze_namedtuple_classdef( defn, self.is_stub_file, self.is_func_scope() @@ -1488,6 +1728,11 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook(decorator_name) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and find_dataclass_transform_spec(decorator): + hook = dataclasses_plugin.dataclass_tag_callback if hook: hook(ClassDefContext(defn, decorator, self)) @@ -1505,6 +1750,12 @@ def apply_class_plugin_hooks(self, defn: ClassDef) -> None: if hook: hook(ClassDefContext(defn, base_expr, self)) + # Check if the class definition itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(defn) + if spec is not None: + dataclasses_plugin.add_dataclass_tag(defn.info) + def get_fullname_for_hook(self, expr: Expression) -> str | None: if isinstance(expr, CallExpr): return self.get_fullname_for_hook(expr.callee) @@ -1531,7 +1782,7 @@ def enter_class(self, info: TypeInfo) -> None: self.locals.append(None) # Add class scope self.is_comprehension_stack.append(False) self.block_depth.append(-1) # The class body increments this to 0 - self.type = info + self._type = info self.missing_names.append(set()) def leave_class(self) -> None: @@ -1539,7 +1790,7 @@ def leave_class(self) -> None: self.block_depth.pop() self.locals.pop() self.is_comprehension_stack.pop() - self.type = self.type_stack.pop() + 
self._type = self.type_stack.pop() self.missing_names.pop() def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None: @@ -1552,6 +1803,10 @@ def analyze_class_decorator(self, defn: ClassDef, decorator: Expression) -> None self.fail("@runtime_checkable can only be used with protocol classes", defn) elif decorator.fullname in FINAL_DECORATOR_NAMES: defn.info.is_final = True + elif isinstance(decorator, CallExpr) and refers_to_fullname( + decorator.callee, DATACLASS_TRANSFORM_NAMES + ): + defn.info.dataclass_transform_spec = self.parse_dataclass_transform_spec(decorator) def clean_up_bases_and_infer_type_variables( self, defn: ClassDef, base_type_exprs: list[Expression], context: Context @@ -1573,6 +1828,8 @@ class Foo(Bar, Generic[T]): ... declared_tvars: TypeVarLikeList = [] is_protocol = False for i, base_expr in enumerate(base_type_exprs): + if isinstance(base_expr, StarExpr): + base_expr.valid = True self.analyze_type_expr(base_expr) try: @@ -1645,10 +1902,16 @@ def analyze_class_typevar_declaration(self, base: Type) -> tuple[TypeVarLikeList ): is_proto = sym.node.fullname != "typing.Generic" tvars: TypeVarLikeList = [] + have_type_var_tuple = False for arg in unbound.args: tag = self.track_incomplete_refs() tvar = self.analyze_unbound_tvar(arg) if tvar: + if isinstance(tvar[1], TypeVarTupleExpr): + if have_type_var_tuple: + self.fail("Can only use one type var tuple in a class def", base) + continue + have_type_var_tuple = True tvars.append(tvar) elif not self.found_incomplete_ref(tag): self.fail("Free type variable expected in %s[...]" % sym.node.name, base) @@ -1667,11 +1930,19 @@ def analyze_unbound_tvar(self, t: Type) -> tuple[str, TypeVarLikeExpr] | None: # It's bound by our type variable scope return None return unbound.name, sym.node - if sym and isinstance(sym.node, TypeVarTupleExpr): - if sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): - # It's bound by our type variable scope + if sym and sym.fullname in ("typing.Unpack", "typing_extensions.Unpack"): + inner_t = unbound.args[0] + if not isinstance(inner_t, UnboundType): return None - return unbound.name, sym.node + inner_unbound = inner_t + inner_sym = self.lookup_qualified(inner_unbound.name, inner_unbound) + if inner_sym and isinstance(inner_sym.node, PlaceholderNode): + self.record_incomplete_ref() + if inner_sym and isinstance(inner_sym.node, TypeVarTupleExpr): + if inner_sym.fullname and not self.tvar_scope.allow_binding(inner_sym.fullname): + # It's bound by our type variable scope + return None + return inner_unbound.name, inner_sym.node if sym is None or not isinstance(sym.node, TypeVarExpr): return None elif sym.fullname and not self.tvar_scope.allow_binding(sym.fullname): @@ -1693,7 +1964,7 @@ def get_all_bases_tvars( except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) return remove_dups(tvars) @@ -1711,7 +1982,7 @@ def get_and_bind_all_tvars(self, type_exprs: list[Expression]) -> list[TypeVarLi except TypeTranslationError: # This error will be caught later. continue - base_tvars = base.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) + base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope)) tvars.extend(base_tvars) tvars = remove_dups(tvars) # Variables are defined in order of textual appearance. 
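# [editor's note, not part of the patch] Sketch of the "one type var tuple per class" rule
# added above. Assumes Python 3.11+ or a recent typing_extensions for this spelling, and
# mypy's TypeVarTuple incomplete-feature flag being enabled.
from typing import Generic
from typing_extensions import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")

class Array(Generic[Unpack[Ts]]):   # OK: a single type var tuple
    ...

# class Bad(Generic[Unpack[Ts], Unpack[Ts]]):
#     ...   # error: Can only use one type var tuple in a class def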
tvar_defs = [] @@ -1815,7 +2086,9 @@ def analyze_base_classes( continue try: - base = self.expr_to_analyzed_type(base_expr, allow_placeholder=True) + base = self.expr_to_analyzed_type( + base_expr, allow_placeholder=True, allow_type_any=True + ) except TypeTranslationError: name = self.get_name_repr_of_expr(base_expr) if isinstance(base_expr, CallExpr): @@ -1901,7 +2174,9 @@ def configure_tuple_base_class(self, defn: ClassDef, base: TupleType) -> Instanc self.fail("Class has two incompatible bases derived from tuple", defn) defn.has_incompatible_baseclass = True if info.special_alias and has_placeholder(info.special_alias.target): - self.defer(force_progress=True) + self.process_placeholder( + None, "tuple base", defn, force_progress=base != info.tuple_type + ) info.update_tuple_type(base) self.setup_alias_type_vars(defn) @@ -1940,7 +2215,7 @@ def calculate_class_mro( if hook: hook(ClassDefContext(defn, FakeExpression(), self)) - def update_metaclass(self, defn: ClassDef) -> None: + def infer_metaclass_and_bases_from_compat_helpers(self, defn: ClassDef) -> None: """Lookup for special metaclass declarations, and update defn fields accordingly. * six.with_metaclass(M, B1, B2, ...) @@ -1954,7 +2229,7 @@ def update_metaclass(self, defn: ClassDef) -> None: if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): - base_expr.accept(self) + self.analyze_type_expr(base_expr) if ( base_expr.callee.fullname in { @@ -2018,30 +2293,42 @@ def is_base_class(self, t: TypeInfo, s: TypeInfo) -> bool: visited.add(base.type) return False - def analyze_metaclass(self, defn: ClassDef) -> None: - if defn.metaclass: + def get_declared_metaclass( + self, name: str, metaclass_expr: Expression | None + ) -> tuple[Instance | None, bool, bool]: + """Get declared metaclass from metaclass expression. + + Returns a tuple of three values: + * A metaclass instance or None + * A boolean indicating whether we should defer + * A boolean indicating whether we should set metaclass Any fallback + (either for Any metaclass or invalid/dynamic metaclass). + + The two boolean flags can only be True if instance is None. + """ + declared_metaclass = None + if metaclass_expr: metaclass_name = None - if isinstance(defn.metaclass, NameExpr): - metaclass_name = defn.metaclass.name - elif isinstance(defn.metaclass, MemberExpr): - metaclass_name = get_member_expr_fullname(defn.metaclass) + if isinstance(metaclass_expr, NameExpr): + metaclass_name = metaclass_expr.name + elif isinstance(metaclass_expr, MemberExpr): + metaclass_name = get_member_expr_fullname(metaclass_expr) if metaclass_name is None: - self.fail(f'Dynamic metaclass not supported for "{defn.name}"', defn.metaclass) - return - sym = self.lookup_qualified(metaclass_name, defn.metaclass) + self.fail(f'Dynamic metaclass not supported for "{name}"', metaclass_expr) + return None, False, True + sym = self.lookup_qualified(metaclass_name, metaclass_expr) if sym is None: # Probably a name error - it is already handled elsewhere - return + return None, False, True if isinstance(sym.node, Var) and isinstance(get_proper_type(sym.node.type), AnyType): - # 'Any' metaclass -- just ignore it. - # - # TODO: A better approach would be to record this information - # and assume that the type object supports arbitrary - # attributes, similar to an 'Any' base class. 
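# [editor's note, not part of the patch] get_declared_metaclass() keeps rejecting metaclass
# expressions that are not a plain or dotted name. Minimal sketch:
class Meta(type):
    ...

class Ok(metaclass=Meta):                     # a plain NameExpr metaclass is analyzed normally
    ...

class C(metaclass=type("M", (type,), {})):    # error: Dynamic metaclass not supported for "C"
    ...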
- return + if self.options.disallow_subclassing_any: + self.fail( + f'Class cannot use "{sym.node.name}" as a metaclass (has type "Any")', + metaclass_expr, + ) + return None, False, True if isinstance(sym.node, PlaceholderNode): - self.defer(defn) - return + return None, True, False # defer later in the caller # Support type aliases, like `_Meta: TypeAlias = type` if ( @@ -2055,16 +2342,20 @@ def analyze_metaclass(self, defn: ClassDef) -> None: metaclass_info = sym.node if not isinstance(metaclass_info, TypeInfo) or metaclass_info.tuple_type is not None: - self.fail(f'Invalid metaclass "{metaclass_name}"', defn.metaclass) - return + self.fail(f'Invalid metaclass "{metaclass_name}"', metaclass_expr) + return None, False, False if not metaclass_info.is_metaclass(): self.fail( - 'Metaclasses not inheriting from "type" are not supported', defn.metaclass + 'Metaclasses not inheriting from "type" are not supported', metaclass_expr ) - return + return None, False, False inst = fill_typevars(metaclass_info) assert isinstance(inst, Instance) - defn.info.declared_metaclass = inst + declared_metaclass = inst + return declared_metaclass, False, False + + def recalculate_metaclass(self, defn: ClassDef, declared_metaclass: Instance | None) -> None: + defn.info.declared_metaclass = declared_metaclass defn.info.metaclass_type = defn.info.calculate_metaclass_type() if any(info.is_protocol for info in defn.info.mro): if ( @@ -2076,16 +2367,10 @@ def analyze_metaclass(self, defn: ClassDef) -> None: abc_meta = self.named_type_or_none("abc.ABCMeta", []) if abc_meta is not None: # May be None in tests with incomplete lib-stub. defn.info.metaclass_type = abc_meta - if defn.info.metaclass_type is None: - # Inconsistency may happen due to multiple baseclasses even in classes that - # do not declare explicit metaclass, but it's harder to catch at this stage - if defn.metaclass is not None: - self.fail(f'Inconsistent metaclass structure for "{defn.name}"', defn) - else: - if defn.info.metaclass_type.type.has_base("enum.EnumMeta"): - defn.info.is_enum = True - if defn.type_vars: - self.fail("Enum class cannot be generic", defn) + if defn.info.metaclass_type and defn.info.metaclass_type.type.has_base("enum.EnumMeta"): + defn.info.is_enum = True + if defn.type_vars: + self.fail("Enum class cannot be generic", defn) # # Imports @@ -2105,13 +2390,33 @@ def visit_import(self, i: Import) -> None: base_id = id.split(".")[0] imported_id = base_id module_public = use_implicit_reexport - self.add_module_symbol( - base_id, - imported_id, - context=i, - module_public=module_public, - module_hidden=not module_public, - ) + + if base_id in self.modules: + node = self.modules[base_id] + if self.is_func_scope(): + kind = LDEF + elif self.type is not None: + kind = MDEF + else: + kind = GDEF + symbol = SymbolTableNode( + kind, node, module_public=module_public, module_hidden=not module_public + ) + self.add_imported_symbol( + imported_id, + symbol, + context=i, + module_public=module_public, + module_hidden=not module_public, + ) + else: + self.add_unknown_imported_symbol( + imported_id, + context=i, + target_name=base_id, + module_public=module_public, + module_hidden=not module_public, + ) def visit_import_from(self, imp: ImportFrom) -> None: self.statement = imp @@ -2173,10 +2478,20 @@ def visit_import_from(self, imp: ImportFrom) -> None: ) continue - if node and not node.module_hidden: + if node: self.process_imported_symbol( node, module_id, id, imported_id, fullname, module_public, context=imp ) + if node.module_hidden: + 
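# [editor's note, not part of the patch] An "Any" metaclass is no longer silently ignored;
# with --disallow-subclassing-any it is now reported. Minimal sketch:
from typing import Any

SomeMeta: Any = type      # a variable whose static type is "Any"

class C(metaclass=SomeMeta):
    ...
# With --disallow-subclassing-any:
# error: Class cannot use "SomeMeta" as a metaclass (has type "Any")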
self.report_missing_module_attribute( + module_id, + id, + imported_id, + module_public=module_public, + module_hidden=not module_public, + context=imp, + add_unknown_imported_symbol=False, + ) elif module and not missing_submodule: # Target module exists but the imported name is missing or hidden. self.report_missing_module_attribute( @@ -2237,19 +2552,6 @@ def process_imported_symbol( module_hidden=module_hidden, becomes_typeinfo=True, ) - existing_symbol = self.globals.get(imported_id) - if ( - existing_symbol - and not isinstance(existing_symbol.node, PlaceholderNode) - and not isinstance(node.node, PlaceholderNode) - ): - # Import can redefine a variable. They get special treatment. - if self.process_import_over_existing_name(imported_id, existing_symbol, node, context): - return - if existing_symbol and isinstance(node.node, PlaceholderNode): - # Imports are special, some redefinitions are allowed, so wait until - # we know what is the new symbol node. - return # NOTE: we take the original node even for final `Var`s. This is to support # a common pattern when constants are re-exported (same applies to import *). self.add_imported_symbol( @@ -2264,6 +2566,7 @@ def report_missing_module_attribute( module_public: bool, module_hidden: bool, context: Node, + add_unknown_imported_symbol: bool = True, ) -> None: # Missing attribute. if self.is_incomplete_namespace(import_id): @@ -2277,25 +2580,25 @@ def report_missing_module_attribute( # Suggest alternatives, if any match is found. module = self.modules.get(import_id) if module: - if not self.options.implicit_reexport and source_id in module.names.keys(): + if source_id in module.names.keys() and not module.names[source_id].module_public: message = ( - 'Module "{}" does not explicitly export attribute "{}"' - "; implicit reexport disabled".format(import_id, source_id) + f'Module "{import_id}" does not explicitly export attribute "{source_id}"' ) else: alternatives = set(module.names.keys()).difference({source_id}) - matches = best_matches(source_id, alternatives)[:3] + matches = best_matches(source_id, alternatives, n=3) if matches: suggestion = f"; maybe {pretty_seq(matches, 'or')}?" message += f"{suggestion}" self.fail(message, context, code=codes.ATTR_DEFINED) - self.add_unknown_imported_symbol( - imported_id, - context, - target_name=None, - module_public=module_public, - module_hidden=not module_public, - ) + if add_unknown_imported_symbol: + self.add_unknown_imported_symbol( + imported_id, + context, + target_name=None, + module_public=module_public, + module_hidden=not module_public, + ) if import_id == "typing": # The user probably has a missing definition in a test fixture. Let's verify. @@ -2306,6 +2609,16 @@ def report_missing_module_attribute( ): # Yes. Generate a helpful note. self.msg.add_fixture_note(fullname, context) + else: + typing_extensions = self.modules.get("typing_extensions") + if typing_extensions and source_id in typing_extensions.names: + self.msg.note( + f"Use `from typing_extensions import {source_id}` instead", context + ) + self.msg.note( + "See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module", + context, + ) def process_import_over_existing_name( self, @@ -2366,14 +2679,9 @@ def visit_import_all(self, i: ImportAll) -> None: if isinstance(node.node, MypyFile): # Star import of submodule from a package, add it as a dependency. 
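# [editor's note, not part of the patch] Sketch of the reworded re-export error (now keyed on
# the symbol's module_public flag) and of the new typing_extensions hint; module names below
# are hypothetical.
#
# helpers.py:
#     from os.path import join            # imported but not re-exported
#
# main.py, checked with --no-implicit-reexport (or from a stub):
#     from helpers import join
#     # error: Module "helpers" does not explicitly export attribute "join"
#
# For typing names that are too new for the target Python version:
#     from typing import Never            # with --python-version 3.10
#     # note: Use `from typing_extensions import Never` instead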
self.imports.add(node.node.fullname) - existing_symbol = self.lookup_current_scope(name) - if existing_symbol and not isinstance(node.node, PlaceholderNode): - # Import can redefine a variable. They get special treatment. - if self.process_import_over_existing_name(name, existing_symbol, node, i): - continue # `from x import *` always reexports symbols self.add_imported_symbol( - name, node, i, module_public=True, module_hidden=False + name, node, context=i, module_public=True, module_hidden=False ) else: @@ -2386,8 +2694,33 @@ def visit_import_all(self, i: ImportAll) -> None: def visit_assignment_expr(self, s: AssignmentExpr) -> None: s.value.accept(self) + if self.is_func_scope(): + if not self.check_valid_comprehension(s): + return self.analyze_lvalue(s.target, escape_comprehensions=True, has_explicit_value=True) + def check_valid_comprehension(self, s: AssignmentExpr) -> bool: + """Check that assignment expression is not nested within comprehension at class scope. + + class C: + [(j := i) for i in [1, 2, 3]] + is a syntax error that is not enforced by Python parser, but at later steps. + """ + for i, is_comprehension in enumerate(reversed(self.is_comprehension_stack)): + if not is_comprehension and i < len(self.locals) - 1: + if self.locals[-1 - i] is None: + self.fail( + "Assignment expression within a comprehension" + " cannot be used in a class body", + s, + code=codes.SYNTAX, + serious=True, + blocker=True, + ) + return False + break + return True + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: self.statement = s @@ -2402,8 +2735,15 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: # But we can't use a full visit because it may emit extra incomplete refs (namely # when analysing any type applications there) thus preventing the further analysis. # To break the tie, we first analyse rvalue partially, if it can be a type alias. - with self.basic_type_applications_set(s): + if self.can_possibly_be_type_form(s): + old_basic_type_applications = self.basic_type_applications + self.basic_type_applications = True + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) + self.basic_type_applications = old_basic_type_applications + else: s.rvalue.accept(self) + if self.found_incomplete_ref(tag) or self.should_wait_rhs(s.rvalue): # Initializer couldn't be fully analyzed. Defer the current node and give up. # Make sure that if we skip the definition of some local names, they can't be @@ -2411,10 +2751,11 @@ def visit_assignment_stmt(self, s: AssignmentStmt) -> None: for expr in names_modified_by_assignment(s): self.mark_incomplete(expr.name, expr) return - if self.can_possibly_be_index_alias(s): + if self.can_possibly_be_type_form(s): # Now re-visit those rvalues that were we skipped type applications above. # This should be safe as generally semantic analyzer is idempotent. - s.rvalue.accept(self) + with self.allow_unbound_tvars_set(): + s.rvalue.accept(self) # The r.h.s. is now ready to be classified, first check if it is a special form: special_form = False @@ -2553,16 +2894,19 @@ def can_be_type_alias(self, rv: Expression, allow_none: bool = False) -> bool: return True return False - def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: - """Like can_be_type_alias(), but simpler and doesn't require analyzed rvalue. + def can_possibly_be_type_form(self, s: AssignmentStmt) -> bool: + """Like can_be_type_alias(), but simpler and doesn't require fully analyzed rvalue. 
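# [editor's note, not part of the patch] Sketch of the new class-scope check for assignment
# expressions inside comprehensions (a restriction CPython enforces after parsing, which mypy
# now reports as a blocking error):
values = [(j := i) + 1 for i in [1, 2, 3]]     # fine at module or function scope

class C:
    pass
    # data = [(j := i) for i in [1, 2, 3]]
    # error: Assignment expression within a comprehension cannot be used in a class body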
- Instead, use lvalues/annotations structure to figure out whether this can - potentially be a type alias definition. Another difference from above function - is that we are only interested IndexExpr and OpExpr rvalues, since only those + Instead, use lvalues/annotations structure to figure out whether this can potentially be + a type alias definition, NamedTuple, or TypedDict. Another difference from above function + is that we are only interested IndexExpr, CallExpr and OpExpr rvalues, since only those can be potentially recursive (things like `A = A` are never valid). """ if len(s.lvalues) > 1: return False + if isinstance(s.rvalue, CallExpr) and isinstance(s.rvalue.callee, RefExpr): + ref = s.rvalue.callee.fullname + return ref in TPDICT_NAMES or ref in TYPED_NAMEDTUPLE_NAMES if not isinstance(s.lvalues[0], NameExpr): return False if s.unanalyzed_type is not None and not self.is_pep_613(s): @@ -2572,17 +2916,6 @@ def can_possibly_be_index_alias(self, s: AssignmentStmt) -> bool: # Something that looks like Foo = Bar[Baz, ...] return True - @contextmanager - def basic_type_applications_set(self, s: AssignmentStmt) -> Iterator[None]: - old = self.basic_type_applications - # As an optimization, only use the double visit logic if this - # can possibly be a recursive type alias. - self.basic_type_applications = self.can_possibly_be_index_alias(s) - try: - yield - finally: - self.basic_type_applications = old - def is_type_ref(self, rv: Expression, bare: bool = False) -> bool: """Does this expression refer to a type? @@ -2693,30 +3026,32 @@ def analyze_namedtuple_assign(self, s: AssignmentStmt) -> bool: return False lvalue = s.lvalues[0] name = lvalue.name - internal_name, info, tvar_defs = self.named_tuple_analyzer.check_namedtuple( - s.rvalue, name, self.is_func_scope() - ) - if internal_name is None: - return False - if isinstance(lvalue, MemberExpr): - self.fail("NamedTuple type as an attribute is not supported", lvalue) - return False - if internal_name != name: - self.fail( - 'First argument to namedtuple() should be "{}", not "{}"'.format( - name, internal_name - ), - s.rvalue, - code=codes.NAME_MATCH, + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + internal_name, info, tvar_defs = self.named_tuple_analyzer.check_namedtuple( + s.rvalue, name, self.is_func_scope() ) + if internal_name is None: + return False + if isinstance(lvalue, MemberExpr): + self.fail("NamedTuple type as an attribute is not supported", lvalue) + return False + if internal_name != name: + self.fail( + 'First argument to namedtuple() should be "{}", not "{}"'.format( + name, internal_name + ), + s.rvalue, + code=codes.NAME_MATCH, + ) + return True + # Yes, it's a valid namedtuple, but defer if it is not ready. + if not info: + self.mark_incomplete(name, lvalue, becomes_typeinfo=True) + else: + self.setup_type_vars(info.defn, tvar_defs) + self.setup_alias_type_vars(info.defn) return True - # Yes, it's a valid namedtuple, but defer if it is not ready. 
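# [editor's note, not part of the patch] can_possibly_be_type_form() now also matches
# NamedTuple/TypedDict calls, so the two-pass analysis can cover recursive definitions written
# with the functional syntax as well. Sketch (assumes Python 3.9+ for list["..."]):
from typing import NamedTuple, Optional, TypedDict

Node = NamedTuple("Node", [("value", int), ("next", Optional["Node"])])
Tree = TypedDict("Tree", {"label": str, "children": list["Tree"]})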
- if not info: - self.mark_incomplete(name, lvalue, becomes_typeinfo=True) - else: - self.setup_type_vars(info.defn, tvar_defs) - self.setup_alias_type_vars(info.defn) - return True def analyze_typeddict_assign(self, s: AssignmentStmt) -> bool: """Check if s defines a typed dict.""" @@ -2730,22 +3065,24 @@ def analyze_typeddict_assign(self, s: AssignmentStmt) -> bool: return False lvalue = s.lvalues[0] name = lvalue.name - is_typed_dict, info, tvar_defs = self.typed_dict_analyzer.check_typeddict( - s.rvalue, name, self.is_func_scope() - ) - if not is_typed_dict: - return False - if isinstance(lvalue, MemberExpr): - self.fail("TypedDict type as attribute is not supported", lvalue) - return False - # Yes, it's a valid typed dict, but defer if it is not ready. - if not info: - self.mark_incomplete(name, lvalue, becomes_typeinfo=True) - else: - defn = info.defn - self.setup_type_vars(defn, tvar_defs) - self.setup_alias_type_vars(defn) - return True + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + is_typed_dict, info, tvar_defs = self.typed_dict_analyzer.check_typeddict( + s.rvalue, name, self.is_func_scope() + ) + if not is_typed_dict: + return False + if isinstance(lvalue, MemberExpr): + self.fail("TypedDict type as attribute is not supported", lvalue) + return False + # Yes, it's a valid typed dict, but defer if it is not ready. + if not info: + self.mark_incomplete(name, lvalue, becomes_typeinfo=True) + else: + defn = info.defn + self.setup_type_vars(defn, tvar_defs) + self.setup_alias_type_vars(defn) + return True def analyze_lvalues(self, s: AssignmentStmt) -> None: # We cannot use s.type, because analyze_simple_literal_type() will set it. @@ -2775,13 +3112,13 @@ def analyze_lvalues(self, s: AssignmentStmt) -> None: def apply_dynamic_class_hook(self, s: AssignmentStmt) -> None: if not isinstance(s.rvalue, CallExpr): return - fname = None + fname = "" call = s.rvalue while True: if isinstance(call.callee, RefExpr): fname = call.callee.fullname # check if method call - if fname is None and isinstance(call.callee, MemberExpr): + if not fname and isinstance(call.callee, MemberExpr): callee_expr = call.callee.expr if isinstance(callee_expr, RefExpr) and callee_expr.fullname: method_name = call.callee.name @@ -2884,7 +3221,8 @@ def store_final_status(self, s: AssignmentStmt) -> None: node = s.lvalues[0].node if isinstance(node, Var): node.is_final = True - node.final_value = self.unbox_literal(s.rvalue) + if s.type: + node.final_value = constant_fold_expr(s.rvalue, self.cur_mod_id) if self.is_class_scope() and ( isinstance(s.rvalue, TempNode) and s.rvalue.no_rhs ): @@ -2944,13 +3282,6 @@ def flatten_lvalues(self, lvalues: list[Expression]) -> list[Expression]: res.append(lv) return res - def unbox_literal(self, e: Expression) -> int | float | bool | str | None: - if isinstance(e, (IntExpr, FloatExpr, StrExpr)): - return e.value - elif isinstance(e, NameExpr) and e.name in ("True", "False"): - return True if e.name == "True" else False - return None - def process_type_annotation(self, s: AssignmentStmt) -> None: """Analyze type annotation or infer simple literal type.""" if s.type: @@ -2959,6 +3290,7 @@ def process_type_annotation(self, s: AssignmentStmt) -> None: analyzed = self.anal_type(s.type, allow_tuple_literal=allow_tuple_literal) # Don't store not ready types (including placeholders). 
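# [editor's note, not part of the patch] final_value is now computed with constant_fold_expr(),
# so simple constant arithmetic on the right-hand side of a Final assignment keeps its literal
# value. Behavior sketched under that assumption; details may vary by mypy version.
from typing import Final, Literal

FLAGS: Final = 1 << 4          # folded to 16
mask: Literal[16] = FLAGS      # the folded literal is still usable in Literal contexts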
if analyzed is None or has_placeholder(analyzed): + self.defer(s) return s.type = analyzed if ( @@ -2980,7 +3312,13 @@ def process_type_annotation(self, s: AssignmentStmt) -> None: self.fail("All protocol members must have explicitly declared types", s) # Set the type if the rvalue is a simple literal (even if the above error occurred). if len(s.lvalues) == 1 and isinstance(s.lvalues[0], RefExpr): - if s.lvalues[0].is_inferred_def: + ref_expr = s.lvalues[0] + safe_literal_inference = True + if self.type and isinstance(ref_expr, NameExpr) and len(self.type.mro) > 1: + # Check if there is a definition in supertype. If yes, we can't safely + # decide here what to infer: int or Literal[42]. + safe_literal_inference = self.type.mro[1].get(ref_expr.name) is None + if safe_literal_inference and ref_expr.is_inferred_def: s.type = self.analyze_simple_literal_type(s.rvalue, s.is_final_def) if s.type: # Store type into nodes. @@ -2999,77 +3337,79 @@ def is_annotated_protocol_member(self, s: AssignmentStmt) -> bool: def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Type | None: """Return builtins.int if rvalue is an int literal, etc. - If this is a 'Final' context, we return "Literal[...]" instead.""" - if self.options.semantic_analysis_only or self.function_stack: - # Skip this if we're only doing the semantic analysis pass. - # This is mostly to avoid breaking unit tests. - # Also skip inside a function; this is to avoid confusing + If this is a 'Final' context, we return "Literal[...]" instead. + """ + if self.function_stack: + # Skip inside a function; this is to avoid confusing # the code that handles dead code due to isinstance() # inside type variables with value restrictions (like # AnyStr). return None - if isinstance(rvalue, FloatExpr): - return self.named_type_or_none("builtins.float") - - value: LiteralValue | None = None - type_name: str | None = None - if isinstance(rvalue, IntExpr): - value, type_name = rvalue.value, "builtins.int" - if isinstance(rvalue, StrExpr): - value, type_name = rvalue.value, "builtins.str" - if isinstance(rvalue, BytesExpr): - value, type_name = rvalue.value, "builtins.bytes" - - if type_name is not None: - assert value is not None - typ = self.named_type_or_none(type_name) - if typ and is_final: - return typ.copy_modified( - last_known_value=LiteralType( - value=value, fallback=typ, line=typ.line, column=typ.column - ) - ) - return typ - return None + value = constant_fold_expr(rvalue, self.cur_mod_id) + if value is None: + return None + + if isinstance(value, bool): + type_name = "builtins.bool" + elif isinstance(value, int): + type_name = "builtins.int" + elif isinstance(value, str): + type_name = "builtins.str" + elif isinstance(value, float): + type_name = "builtins.float" + + typ = self.named_type_or_none(type_name) + if typ and is_final: + return typ.copy_modified(last_known_value=LiteralType(value=value, fallback=typ)) + return typ def analyze_alias( - self, rvalue: Expression, allow_placeholder: bool = False - ) -> tuple[Type | None, list[str], set[str], list[str]]: + self, name: str, rvalue: Expression, allow_placeholder: bool = False + ) -> tuple[Type | None, list[TypeVarLikeType], set[str], list[str]]: """Check if 'rvalue' is a valid type allowed for aliasing (e.g. not a type variable). If yes, return the corresponding type, a list of qualified type variable names for generic aliases, a set of names the alias depends on, and a list of type variables if the alias is generic. 
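# [editor's note, not part of the patch] analyze_alias() now binds the alias's own type
# variables explicitly (allowed_alias_tvars), and recursive aliases are enabled by default
# (see disable_recursive_aliases below). Sketch:
from typing import Dict, List, TypeVar, Union

T = TypeVar("T")

Pairs = Dict[T, List[T]]                                         # generic alias
Json = Union[None, int, str, List["Json"], Dict[str, "Json"]]    # recursive alias

def lookup(p: Pairs[str], key: str) -> List[str]:
    return p[key]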
- An schematic example for the dependencies: + A schematic example for the dependencies: A = int B = str analyze_alias(Dict[A, B])[2] == {'__main__.A', '__main__.B'} """ dynamic = bool(self.function_stack and self.function_stack[-1].is_dynamic()) global_scope = not self.type and not self.function_stack - res = analyze_type_alias( - rvalue, - self, - self.tvar_scope, - self.plugin, - self.options, - self.is_typeshed_stub_file, - allow_placeholder=allow_placeholder, - in_dynamic_func=dynamic, - global_scope=global_scope, - ) - typ: Type | None = None - if res: - typ, depends_on = res - found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) - alias_tvars = [name for (name, node) in found_type_vars] - qualified_tvars = [node.fullname for (name, node) in found_type_vars] - else: - alias_tvars = [] - depends_on = set() - qualified_tvars = [] - return typ, alias_tvars, depends_on, qualified_tvars + try: + typ = expr_to_unanalyzed_type(rvalue, self.options, self.is_stub_file) + except TypeTranslationError: + self.fail( + "Invalid type alias: expression is not a valid type", rvalue, code=codes.VALID_TYPE + ) + return None, [], set(), [] + + found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope)) + tvar_defs: list[TypeVarLikeType] = [] + namespace = self.qualified_name(name) + with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)): + for name, tvar_expr in found_type_vars: + tvar_def = self.tvar_scope.bind_new(name, tvar_expr) + tvar_defs.append(tvar_def) + + analyzed, depends_on = analyze_type_alias( + typ, + self, + self.tvar_scope, + self.plugin, + self.options, + self.is_typeshed_stub_file, + allow_placeholder=allow_placeholder, + in_dynamic_func=dynamic, + global_scope=global_scope, + allowed_alias_tvars=tvar_defs, + ) + + qualified_tvars = [node.fullname for _name, node in found_type_vars] + return analyzed, tvar_defs, depends_on, qualified_tvars def is_pep_613(self, s: AssignmentStmt) -> bool: if s.unanalyzed_type is not None and isinstance(s.unanalyzed_type, UnboundType): @@ -3149,19 +3489,17 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: res: Type | None = None if self.is_none_alias(rvalue): res = NoneType() - alias_tvars, depends_on, qualified_tvars = ( - [], - set(), - [], - ) # type: List[str], Set[str], List[str] + alias_tvars: list[TypeVarLikeType] = [] + depends_on: set[str] = set() + qualified_tvars: list[str] = [] else: tag = self.track_incomplete_refs() res, alias_tvars, depends_on, qualified_tvars = self.analyze_alias( - rvalue, allow_placeholder=True + lvalue.name, rvalue, allow_placeholder=True ) if not res: return False - if self.options.enable_recursive_aliases and not self.is_func_scope(): + if not self.options.disable_recursive_aliases and not self.is_func_scope(): # Only marking incomplete for top-level placeholders makes recursive aliases like # `A = Sequence[str | A]` valid here, similar to how we treat base classes in class # definitions, allowing `class str(Sequence[str]): ...` @@ -3204,7 +3542,11 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: no_args=no_args, eager=eager, ) - if isinstance(s.rvalue, (IndexExpr, CallExpr)): # CallExpr is for `void = type(None)` + if isinstance(s.rvalue, (IndexExpr, CallExpr, OpExpr)) and ( + not isinstance(rvalue, OpExpr) + or (self.options.python_version >= (3, 10) or self.is_stub_file) + ): + # Note: CallExpr is for "void = type(None)" and OpExpr is for "X | Y" union syntax. 
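# [editor's note, not part of the patch] As the note above says, an "X | Y" right-hand side is
# only treated as a type alias target on Python 3.10+ (or in stub files), since the expression
# is evaluated at runtime. Sketch for 3.10+; the Union spelling works everywhere:
from typing import Union

IntOrStr = int | str                 # 3.10+ only
IntOrStrCompat = Union[int, str]     # equivalent, version-independent spelling

def parse(x: IntOrStr) -> IntOrStrCompat:
    return x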
s.rvalue.analyzed = TypeAliasExpr(alias_node) s.rvalue.analyzed.line = s.line # we use the column from resulting target, to get better location for errors @@ -3241,6 +3583,12 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: current_node = existing.node if existing else alias_node assert isinstance(current_node, TypeAlias) self.disable_invalid_recursive_aliases(s, current_node) + if self.is_class_scope(): + assert self.type is not None + if self.type.is_protocol: + self.fail("Type aliases are prohibited in protocol bodies", s) + if not lvalue.name[0].isupper(): + self.note("Use variable annotation syntax to define protocol members", s) return True def disable_invalid_recursive_aliases( @@ -3292,7 +3640,7 @@ def analyze_lvalue( elif isinstance(lval, MemberExpr): self.analyze_member_lvalue(lval, explicit_type, is_final) if explicit_type and not self.is_self_member_ref(lval): - self.fail("Type cannot be declared in assignment to non-self " "attribute", lval) + self.fail("Type cannot be declared in assignment to non-self attribute", lval) elif isinstance(lval, IndexExpr): if explicit_type: self.fail("Unexpected type declaration", lval) @@ -3537,14 +3885,20 @@ def check_lvalue_validity(self, node: Expression | SymbolNode | None, ctx: Conte self.fail(message_registry.CANNOT_ASSIGN_TO_TYPE, ctx) def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: - if isinstance(typ, StarType) and not isinstance(lvalue, StarExpr): - self.fail("Star type only allowed for starred expressions", lvalue) if isinstance(lvalue, RefExpr): lvalue.is_inferred_def = False if isinstance(lvalue.node, Var): var = lvalue.node var.type = typ var.is_ready = True + typ = get_proper_type(typ) + if ( + var.is_final + and isinstance(typ, Instance) + and typ.last_known_value + and (not self.type or not self.type.is_enum) + ): + var.final_value = typ.last_known_value.value # If node is not a variable, we'll catch it elsewhere. elif isinstance(lvalue, TupleExpr): typ = get_proper_type(typ) @@ -3558,10 +3912,7 @@ def store_declared_types(self, lvalue: Lvalue, typ: Type) -> None: self.fail("Tuple type expected for multiple variables", lvalue) elif isinstance(lvalue, StarExpr): # Historical behavior for the old parser - if isinstance(typ, StarType): - self.store_declared_types(lvalue.expr, typ.type) - else: - self.store_declared_types(lvalue.expr, typ) + self.store_declared_types(lvalue.expr, typ) else: # This has been flagged elsewhere as an error, so just ignore here. pass @@ -3572,7 +3923,7 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: Return True if this looks like a type variable declaration (but maybe with errors), otherwise return False. 
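# [editor's note, not part of the patch] check_and_set_up_type_alias() now rejects type aliases
# defined inside protocol bodies. Minimal sketch:
from typing import List, Protocol

class HasItems(Protocol):
    items: List[int]           # protocol members are declared with annotations
    # rows = List[int]         # error: Type aliases are prohibited in protocol bodies
    #                          # note: Use variable annotation syntax to define protocol members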
""" - call = self.get_typevarlike_declaration(s, ("typing.TypeVar",)) + call = self.get_typevarlike_declaration(s, ("typing.TypeVar", "typing_extensions.TypeVar")) if not call: return False @@ -3631,12 +3982,16 @@ def process_typevar_declaration(self, s: AssignmentStmt) -> bool: type_var = TypeVarExpr(name, self.qualified_name(name), values, upper_bound, variance) type_var.line = call.line call.analyzed = type_var + updated = True else: assert isinstance(call.analyzed, TypeVarExpr) + updated = values != call.analyzed.values or upper_bound != call.analyzed.upper_bound call.analyzed.upper_bound = upper_bound call.analyzed.values = values if any(has_placeholder(v) for v in values) or has_placeholder(upper_bound): - self.defer(force_progress=True) + self.process_placeholder( + None, f"TypeVar {'values' if values else 'upper bound'}", s, force_progress=updated + ) self.add_symbol(name, call.analyzed, s) return True @@ -3828,8 +4183,7 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: if len(call.args) > 1: self.fail("Only the first argument to TypeVarTuple has defined semantics", s) - if not self.options.enable_incomplete_features: - self.fail('"TypeVarTuple" is not supported by mypy yet', s) + if not self.incomplete_feature_enabled(TYPE_VAR_TUPLE, s): return False name = self.extract_typevarlike_name(s, call) @@ -3838,8 +4192,9 @@ def process_typevartuple_declaration(self, s: AssignmentStmt) -> bool: # PEP 646 does not specify the behavior of variance, constraints, or bounds. if not call.analyzed: + tuple_fallback = self.named_type("builtins.tuple", [self.object_type()]) typevartuple_var = TypeVarTupleExpr( - name, self.qualified_name(name), self.object_type(), INVARIANT + name, self.qualified_name(name), self.object_type(), tuple_fallback, INVARIANT ) typevartuple_var.line = call.line call.analyzed = typevartuple_var @@ -3911,6 +4266,12 @@ def check_classvar(self, s: AssignmentStmt) -> None: # See https://github.com/python/mypy/issues/11538 self.fail(message_registry.CLASS_VAR_WITH_TYPEVARS, s) + if ( + analyzed is not None + and self.type.self_type in get_type_vars(analyzed) + and self.type.defn.type_vars + ): + self.fail(message_registry.CLASS_VAR_WITH_GENERIC_SELF, s) elif not isinstance(lvalue, MemberExpr) or self.is_self_member_ref(lvalue): # In case of member access, report error only when assigning to self # Other kinds of member assignments should be already reported @@ -4347,7 +4708,7 @@ def bind_name_expr(self, expr: NameExpr, sym: SymbolTableNode) -> None: else: expr.kind = sym.kind expr.node = sym.node - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" def visit_super_expr(self, expr: SuperExpr) -> None: if not self.type and not expr.call.args: @@ -4383,8 +4744,7 @@ def visit_dict_expr(self, expr: DictExpr) -> None: def visit_star_expr(self, expr: StarExpr) -> None: if not expr.valid: - # XXX TODO Change this error message - self.fail("Can use starred expression only as assignment target", expr) + self.fail("Can use starred expression only as assignment target", expr, blocker=True) else: expr.expr.accept(self) @@ -4573,7 +4933,7 @@ def visit_member_expr(self, expr: MemberExpr) -> None: self.process_placeholder(expr.name, "attribute", expr) return expr.kind = sym.kind - expr.fullname = sym.fullname + expr.fullname = sym.fullname or "" expr.node = sym.node elif isinstance(base, RefExpr): # This branch handles the case C.bar (or cls.bar or self.bar inside @@ -4605,7 +4965,7 @@ def visit_member_expr(self, expr: MemberExpr) -> None: if not n: 
return expr.kind = n.kind - expr.fullname = n.fullname + expr.fullname = n.fullname or "" expr.node = n.node def visit_op_expr(self, expr: OpExpr) -> None: @@ -4732,12 +5092,12 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: except TypeTranslationError: self.fail("Type expected within [...]", expr) return None - # We always allow unbound type variables in IndexExpr, since we - # may be analysing a type alias definition rvalue. The error will be - # reported elsewhere if it is not the case. analyzed = self.anal_type( typearg, - allow_unbound_tvars=True, + # The type application may appear in base class expression, + # where type variables are not bound yet. Or when accepting + # r.h.s. of type alias before we figured out it is a type alias. + allow_unbound_tvars=self.allow_unbound_tvars, allow_placeholder=True, allow_param_spec_literals=has_param_spec, ) @@ -4862,6 +5222,7 @@ def visit_conditional_expr(self, expr: ConditionalExpr) -> None: def visit__promote_expr(self, expr: PromoteExpr) -> None: analyzed = self.anal_type(expr.type) if analyzed is not None: + assert isinstance(analyzed, ProperType), "Cannot use type aliases for promotions" expr.type = analyzed def visit_yield_expr(self, e: YieldExpr) -> None: @@ -4886,10 +5247,11 @@ def visit_yield_expr(self, e: YieldExpr) -> None: e.expr.accept(self) def visit_await_expr(self, expr: AwaitExpr) -> None: - if not self.is_func_scope(): - self.fail('"await" outside function', expr) + if not self.is_func_scope() or not self.function_stack: + # We check both because is_function_scope() returns True inside comprehensions. + self.fail('"await" outside function', expr, serious=True, blocker=True) elif not self.function_stack[-1].is_coroutine: - self.fail('"await" outside coroutine ("async def")', expr) + self.fail('"await" outside coroutine ("async def")', expr, serious=True, blocker=True) expr.expr.accept(self) # @@ -5014,7 +5376,9 @@ class C: X = X # Initializer refers to outer scope Nested classes are an exception, since we want to support - arbitrary forward references in type annotations. + arbitrary forward references in type annotations. Also, we + allow forward references to type aliases to support recursive + types. """ # TODO: Forward reference to name imported in class body is not # caught. @@ -5025,7 +5389,7 @@ class C: node is None or self.is_textually_before_statement(node) or not self.is_defined_in_current_module(node.fullname) - or isinstance(node, TypeInfo) + or isinstance(node, (TypeInfo, TypeAlias)) or (isinstance(node, PlaceholderNode) and node.becomes_typeinfo) ) @@ -5062,7 +5426,7 @@ def is_overloaded_item(self, node: SymbolNode, statement: Statement) -> bool: return False def is_defined_in_current_module(self, fullname: str | None) -> bool: - if fullname is None: + if not fullname: return False return module_prefix(self.modules, fullname) == self.cur_mod_id @@ -5107,6 +5471,11 @@ def lookup_qualified( if isinstance(typ, AnyType): # Allow access through Var with Any type without error. return self.implicit_symbol(sym, name, parts[i:], typ) + # This might be something like valid `P.args` or invalid `P.__bound__` access. + # Important note that `ParamSpecExpr` is also ignored in other places. 
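# [editor's note, not part of the patch] Misplaced "await" is now a blocking error during
# semantic analysis. Minimal sketch:
import asyncio

async def fetch() -> int:
    await asyncio.sleep(0)       # OK: inside a coroutine
    return 1

def run_it() -> int:
    # await fetch()              # error: "await" outside coroutine ("async def")
    return asyncio.run(fetch())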
+ # See https://github.com/python/mypy/pull/13468 + if isinstance(node, ParamSpecExpr) and part in ("args", "kwargs"): + return None # Lookup through invalid node, such as variable or function nextsym = None if not nextsym or nextsym.module_hidden: @@ -5252,7 +5621,7 @@ def named_type_or_none(self, fullname: str, args: list[Type] | None = None) -> I return None node = sym.node if isinstance(node, TypeAlias): - assert isinstance(node.target, Instance) # type: ignore + assert isinstance(node.target, Instance) # type: ignore[misc] node = node.target.type assert isinstance(node, TypeInfo), node if args is not None: @@ -5427,24 +5796,6 @@ def add_local(self, node: Var | FuncDef | OverloadedFuncDef, context: Context) - node._fullname = name self.add_symbol(name, node, context) - def add_module_symbol( - self, id: str, as_id: str, context: Context, module_public: bool, module_hidden: bool - ) -> None: - """Add symbol that is a reference to a module object.""" - if id in self.modules: - node = self.modules[id] - self.add_symbol( - as_id, node, context, module_public=module_public, module_hidden=module_hidden - ) - else: - self.add_unknown_imported_symbol( - as_id, - context, - target_name=id, - module_public=module_public, - module_hidden=module_hidden, - ) - def _get_node_for_class_scoped_import( self, name: str, symbol_node: SymbolNode | None, context: Context ) -> SymbolNode | None: @@ -5491,13 +5842,23 @@ def add_imported_symbol( self, name: str, node: SymbolTableNode, - context: Context, + context: ImportBase, module_public: bool, module_hidden: bool, ) -> None: """Add an alias to an existing symbol through import.""" assert not module_hidden or not module_public + existing_symbol = self.lookup_current_scope(name) + if ( + existing_symbol + and not isinstance(existing_symbol.node, PlaceholderNode) + and not isinstance(node.node, PlaceholderNode) + ): + # Import can redefine a variable. They get special treatment. + if self.process_import_over_existing_name(name, existing_symbol, node, context): + return + symbol_node: SymbolNode | None = node.node if self.is_class_scope(): @@ -5643,7 +6004,9 @@ def is_incomplete_namespace(self, fullname: str) -> bool: """ return fullname in self.incomplete_namespaces - def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: """Process a reference targeting placeholder node. 
If this is not a final iteration, defer current node, @@ -5655,11 +6018,12 @@ def process_placeholder(self, name: str, kind: str, ctx: Context) -> None: if self.final_iteration: self.cannot_resolve_name(name, kind, ctx) else: - self.defer(ctx) + self.defer(ctx, force_progress=force_progress) - def cannot_resolve_name(self, name: str, kind: str, ctx: Context) -> None: - self.fail(f'Cannot resolve {kind} "{name}" (possible cyclic definition)', ctx) - if self.options.enable_recursive_aliases and self.is_func_scope(): + def cannot_resolve_name(self, name: str | None, kind: str, ctx: Context) -> None: + name_format = f' "{name}"' if name else "" + self.fail(f"Cannot resolve {kind}{name_format} (possible cyclic definition)", ctx) + if not self.options.disable_recursive_aliases and self.is_func_scope(): self.note("Recursive types are not allowed at function scope", ctx) def qualified_name(self, name: str) -> str: @@ -5850,7 +6214,7 @@ def in_checked_function(self) -> bool: current_index = len(self.function_stack) - 1 while current_index >= 0: current_func = self.function_stack[current_index] - if isinstance(current_func, FuncItem) and not isinstance(current_func, LambdaExpr): + if not isinstance(current_func, LambdaExpr): return not current_func.is_dynamic() # Special case, `lambda` inherits the "checked" state from its parent. @@ -5875,12 +6239,22 @@ def fail( return # In case it's a bug and we don't really have context assert ctx is not None, msg - self.errors.report(ctx.get_line(), ctx.get_column(), msg, blocker=blocker, code=code) + self.errors.report(ctx.line, ctx.column, msg, blocker=blocker, code=code) def note(self, msg: str, ctx: Context, code: ErrorCode | None = None) -> None: if not self.in_checked_function(): return - self.errors.report(ctx.get_line(), ctx.get_column(), msg, severity="note", code=code) + self.errors.report(ctx.line, ctx.column, msg, severity="note", code=code) + + def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: + if feature not in self.options.enable_incomplete_feature: + self.fail( + f'"{feature}" support is experimental,' + f" use --enable-incomplete-feature={feature} to enable", + ctx, + ) + return False + return True def accept(self, node: Node) -> None: try: @@ -5889,7 +6263,11 @@ def accept(self, node: Node) -> None: report_internal_error(err, self.errors.file, node.line, self.errors, self.options) def expr_to_analyzed_type( - self, expr: Expression, report_invalid_types: bool = True, allow_placeholder: bool = False + self, + expr: Expression, + report_invalid_types: bool = True, + allow_placeholder: bool = False, + allow_type_any: bool = False, ) -> Type | None: if isinstance(expr, CallExpr): # This is a legacy syntax intended mostly for Python 2, we keep it for @@ -5914,7 +6292,10 @@ def expr_to_analyzed_type( return TupleType(info.tuple_type.items, fallback=fallback) typ = self.expr_to_unanalyzed_type(expr) return self.anal_type( - typ, report_invalid_types=report_invalid_types, allow_placeholder=allow_placeholder + typ, + report_invalid_types=report_invalid_types, + allow_placeholder=allow_placeholder, + allow_type_any=allow_type_any, ) def analyze_type_expr(self, expr: Expression) -> None: @@ -5924,7 +6305,7 @@ def analyze_type_expr(self, expr: Expression) -> None: # them semantically analyzed, however, if they need to treat it as an expression # and not a type. (Which is to say, mypyc needs to do this.) Do the analysis # in a fresh tvar scope in order to suppress any errors about using type variables. 
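# [editor's note, not part of the patch] incomplete_feature_enabled() replaces the old
# hard-coded failures; experimental features are now opt-in per feature, e.g.
#     mypy --enable-incomplete-feature=TypeVarTuple program.py
# Without the flag the error reads, for example:
#     "TypeVarTuple" support is experimental,
#     use --enable-incomplete-feature=TypeVarTuple to enable
from typing_extensions import TypeVarTuple

Ts = TypeVarTuple("Ts")          # gated behind the flag above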
- with self.tvar_scope_frame(TypeVarLikeScope()): + with self.tvar_scope_frame(TypeVarLikeScope()), self.allow_unbound_tvars_set(): expr.accept(self) def type_analyzer( @@ -5937,6 +6318,8 @@ def type_analyzer( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allow_type_any: bool = False, ) -> TypeAnalyser: if tvar_scope is None: tvar_scope = self.tvar_scope @@ -5952,6 +6335,8 @@ def type_analyzer( allow_placeholder=allow_placeholder, allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, + prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tpan.in_dynamic_func = bool(self.function_stack and self.function_stack[-1].is_dynamic()) tpan.global_scope = not self.type and not self.function_stack @@ -5971,6 +6356,8 @@ def anal_type( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allow_type_any: bool = False, third_pass: bool = False, ) -> Type | None: """Semantically analyze a type. @@ -5993,6 +6380,11 @@ def anal_type( NOTE: The caller shouldn't defer even if this returns None or a placeholder type. """ + has_self_type = find_self_type( + typ, lambda name: self.lookup_qualified(name, typ, suppress_errors=True) + ) + if has_self_type and self.type and prohibit_self_type is None: + self.setup_self_type() a = self.type_analyzer( tvar_scope=tvar_scope, allow_unbound_tvars=allow_unbound_tvars, @@ -6001,6 +6393,8 @@ def anal_type( allow_required=allow_required, allow_param_spec_literals=allow_param_spec_literals, report_invalid_types=report_invalid_types, + prohibit_self_type=prohibit_self_type, + allow_type_any=allow_type_any, ) tag = self.track_incomplete_refs() typ = typ.accept(a) @@ -6036,7 +6430,9 @@ def add_plugin_dependency(self, trigger: str, target: str | None = None) -> None target = self.scope.current_target() self.cur_mod_node.plugin_deps.setdefault(trigger, set()).add(target) - def add_type_alias_deps(self, aliases_used: Iterable[str], target: str | None = None) -> None: + def add_type_alias_deps( + self, aliases_used: Collection[str], target: str | None = None + ) -> None: """Add full names of type aliases on which the current node depends. This is used by fine-grained incremental mode to re-check the corresponding nodes. @@ -6066,6 +6462,17 @@ def parse_bool(self, expr: Expression) -> bool | None: return False return None + def parse_str_literal(self, expr: Expression) -> str | None: + """Attempt to find the string literal value of the given expression. 
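# [editor's note, not part of the patch] anal_type() now detects uses of Self via
# find_self_type() and binds the class's self-type; prohibit_self_type bans it where it makes
# no sense (e.g. NamedTuple item types further below). Sketch of the feature being wired up:
from typing import List
from typing_extensions import Self

class Builder:
    def __init__(self) -> None:
        self.parts: List[str] = []

    def add(self, part: str) -> Self:
        self.parts.append(part)
        return self

class FancyBuilder(Builder):
    pass

fb = FancyBuilder().add("a").add("b")   # inferred as FancyBuilder, not Builder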
Returns `None` if no + literal value can be found.""" + if isinstance(expr, StrExpr): + return expr.value + if isinstance(expr, RefExpr) and isinstance(expr.node, Var) and expr.node.type is not None: + values = try_getting_str_literals_from_type(expr.node.type) + if values is not None and len(values) == 1: + return values[0] + return None + def set_future_import_flags(self, module_name: str) -> None: if module_name in FUTURE_IMPORTS: self.modules[self.cur_mod_id].future_import_flags.add(FUTURE_IMPORTS[module_name]) @@ -6073,6 +6480,54 @@ def set_future_import_flags(self, module_name: str) -> None: def is_future_flag_set(self, flag: str) -> bool: return self.modules[self.cur_mod_id].is_future_flag_set(flag) + def parse_dataclass_transform_spec(self, call: CallExpr) -> DataclassTransformSpec: + """Build a DataclassTransformSpec from the arguments passed to the given call to + typing.dataclass_transform.""" + parameters = DataclassTransformSpec() + for name, value in zip(call.arg_names, call.args): + # Skip any positional args. Note that any such args are invalid, but we can rely on + # typeshed to enforce this and don't need an additional error here. + if name is None: + continue + + # field_specifiers is currently the only non-boolean argument; check for it first so + # so the rest of the block can fail through to handling booleans + if name == "field_specifiers": + parameters.field_specifiers = self.parse_dataclass_transform_field_specifiers( + value + ) + continue + + boolean = require_bool_literal_argument(self, value, name) + if boolean is None: + continue + + if name == "eq_default": + parameters.eq_default = boolean + elif name == "order_default": + parameters.order_default = boolean + elif name == "kw_only_default": + parameters.kw_only_default = boolean + elif name == "frozen_default": + parameters.frozen_default = boolean + else: + self.fail(f'Unrecognized dataclass_transform parameter "{name}"', call) + + return parameters + + def parse_dataclass_transform_field_specifiers(self, arg: Expression) -> tuple[str, ...]: + if not isinstance(arg, TupleExpr): + self.fail('"field_specifiers" argument must be a tuple literal', arg) + return tuple() + + names = [] + for specifier in arg.items: + if not isinstance(specifier, RefExpr): + self.fail('"field_specifiers" must only contain identifiers', specifier) + return tuple() + names.append(specifier.fullname) + return tuple(names) + def replace_implicit_first_type(sig: FunctionLike, new: Type) -> FunctionLike: if isinstance(sig, CallableType): diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index 654a29c38d08..3f5bc9c4c2de 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -22,7 +22,7 @@ Var, ) from mypy.options import Options -from mypy.types import Instance, Type +from mypy.types import MYPYC_NATIVE_INT_NAMES, Instance, ProperType # Hard coded type promotions (shared between all Python versions). # These add extra ad-hoc edges to the subtyping relation. For example, @@ -95,7 +95,7 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E # implement some methods. typ.abstract_attributes = sorted(abstract) if is_stub_file: - if typ.declared_metaclass and typ.declared_metaclass.type.fullname == "abc.ABCMeta": + if typ.declared_metaclass and typ.declared_metaclass.type.has_base("abc.ABCMeta"): return if typ.is_protocol: return @@ -155,7 +155,7 @@ def add_type_promotion( This includes things like 'int' being compatible with 'float'. 
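# [editor's note, not part of the patch] Sketch of the call shape that
# parse_dataclass_transform_spec() handles (PEP 681); names below are illustrative, and
# field_specifiers must be a tuple literal of identifiers.
from typing import Type, TypeVar
from typing_extensions import dataclass_transform

T = TypeVar("T")

def model_field(*, default: object = None, kw_only: bool = False) -> object:
    return default

@dataclass_transform(eq_default=True, kw_only_default=True, field_specifiers=(model_field,))
def create_model(cls: Type[T]) -> Type[T]:
    # A real implementation would synthesize __init__ and friends at runtime;
    # mypy now synthesizes the matching signatures from the parsed spec.
    return cls

@create_model
class User:
    id: int
    name: str = "anonymous"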
""" defn = info.defn - promote_targets: list[Type] = [] + promote_targets: list[ProperType] = [] for decorator in defn.decorators: if isinstance(decorator, CallExpr): analyzed = decorator.analyzed @@ -165,6 +165,10 @@ def add_type_promotion( if not promote_targets: if defn.fullname in TYPE_PROMOTIONS: target_sym = module_names.get(TYPE_PROMOTIONS[defn.fullname]) + if defn.fullname == "builtins.bytearray" and options.disable_bytearray_promotion: + target_sym = None + elif defn.fullname == "builtins.memoryview" and options.disable_memoryview_promotion: + target_sym = None # With test stubs, the target may not exist. if target_sym: target_info = target_sym.node @@ -173,10 +177,10 @@ def add_type_promotion( # Special case the promotions between 'int' and native integer types. # These have promotions going both ways, such as from 'int' to 'i64' # and 'i64' to 'int', for convenience. - if defn.fullname == "mypy_extensions.i64" or defn.fullname == "mypy_extensions.i32": + if defn.fullname in MYPYC_NATIVE_INT_NAMES: int_sym = builtin_names["int"] assert isinstance(int_sym.node, TypeInfo) int_sym.node._promote.append(Instance(defn.info, [])) - defn.info.alt_promote = int_sym.node + defn.info.alt_promote = Instance(int_sym.node, []) if promote_targets: defn.info._promote.extend(promote_targets) diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py index d48e620b89f1..c7b8e44f65aa 100644 --- a/mypy/semanal_enum.py +++ b/mypy/semanal_enum.py @@ -220,14 +220,14 @@ def parse_enum_call_args( items.append(field) else: return self.fail_enum_call_arg( - "%s() expects a string, tuple, list or dict literal as the second argument" + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" % class_name, call, ) else: # TODO: Allow dict(x=1, y=2) as a substitute for {'x': 1, 'y': 2}? return self.fail_enum_call_arg( - "%s() expects a string, tuple, list or dict literal as the second argument" + "Second argument of %s() must be string, tuple, list or dict literal for mypy to determine Enum members" % class_name, call, ) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 406fd93139d1..a5e85878e931 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -37,6 +37,7 @@ from mypy.nodes import Decorator, FuncDef, MypyFile, OverloadedFuncDef, TypeInfo, Var from mypy.options import Options from mypy.plugin import ClassDefContext +from mypy.plugins import dataclasses as dataclasses_plugin from mypy.semanal import ( SemanticAnalyzer, apply_semantic_analyzer_patches, @@ -49,6 +50,7 @@ check_protocol_status, ) from mypy.semanal_infer import infer_decorator_signature_if_simple +from mypy.semanal_shared import find_dataclass_transform_spec from mypy.semanal_typeargs import TypeArgumentAnalyzer from mypy.server.aststrip import SavedAttributes from mypy.util import is_typeshed_file @@ -66,7 +68,14 @@ # Number of passes over core modules before going on to the rest of the builtin SCC. 
CORE_WARMUP: Final = 2 -core_modules: Final = ["typing", "builtins", "abc", "collections"] +core_modules: Final = [ + "typing", + "_collections_abc", + "builtins", + "abc", + "collections", + "collections.abc", +] def semantic_analysis_for_scc(graph: Graph, scc: list[str], errors: Errors) -> None: @@ -367,7 +376,11 @@ def check_type_arguments(graph: Graph, scc: list[str], errors: Errors) -> None: for module in scc: state = graph[module] assert state.tree - analyzer = TypeArgumentAnalyzer(errors, state.options, is_typeshed_file(state.path or "")) + analyzer = TypeArgumentAnalyzer( + errors, + state.options, + is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): state.tree.accept(analyzer) @@ -381,7 +394,11 @@ def check_type_arguments_in_targets( This mirrors the logic in check_type_arguments() except that we process only some targets. This is used in fine grained incremental mode. """ - analyzer = TypeArgumentAnalyzer(errors, state.options, is_typeshed_file(state.path or "")) + analyzer = TypeArgumentAnalyzer( + errors, + state.options, + is_typeshed_file(state.options.abs_custom_typeshed_dir, state.path or ""), + ) with state.wrap_context(): with mypy.state.state.strict_optional_set(state.options.strict_optional): for target in targets: @@ -442,11 +459,29 @@ def apply_hooks_to_class( ok = True for decorator in defn.decorators: with self.file_context(file_node, options, info): + hook = None + decorator_name = self.get_fullname_for_hook(decorator) if decorator_name: hook = self.plugin.get_class_decorator_hook_2(decorator_name) - if hook: - ok = ok and hook(ClassDefContext(defn, decorator, self)) + # Special case: if the decorator is itself decorated with + # typing.dataclass_transform, apply the hook for the dataclasses plugin + # TODO: remove special casing here + if hook is None and find_dataclass_transform_spec(decorator): + hook = dataclasses_plugin.dataclass_class_maker_callback + + if hook: + ok = ok and hook(ClassDefContext(defn, decorator, self)) + + # Check if the class definition itself triggers a dataclass transform (via a parent class/ + # metaclass) + spec = find_dataclass_transform_spec(info) + if spec is not None: + with self.file_context(file_node, options, info): + # We can't use the normal hook because reason = defn, and ClassDefContext only accepts + # an Expression for reason + ok = ok and dataclasses_plugin.DataclassTransformer(defn, defn, spec, self).transform() + return ok diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 4375602b5076..1194557836b1 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -32,6 +32,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, SymbolTable, SymbolTableNode, @@ -111,7 +112,7 @@ def analyze_namedtuple_classdef( if result is None: # This is a valid named tuple, but some types are incomplete. return True, None - items, types, default_items = result + items, types, default_items, statements = result if is_func_scope and "@" not in defn.name: defn.name += "@" + str(defn.line) existing_info = None @@ -123,6 +124,7 @@ def analyze_namedtuple_classdef( defn.analyzed = NamedTupleExpr(info, is_typed=True) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements # All done: this is a valid named tuple with all types known. return True, info # This can't be a valid named tuple. 
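# [editor's note, not part of the patch] Sketch of when the reworded functional-Enum message
# fires: the member list must be a literal that mypy can inspect.
from enum import Enum

Color = Enum("Color", ["RED", "GREEN", "BLUE"])    # OK: list literal

member_names = ["LOW", "HIGH"]
Level = Enum("Level", member_names)
# error: Second argument of Enum() must be string, tuple, list or dict literal
# for mypy to determine Enum members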
@@ -130,24 +132,27 @@ def analyze_namedtuple_classdef( def check_namedtuple_classdef( self, defn: ClassDef, is_stub_file: bool - ) -> tuple[list[str], list[Type], dict[str, Expression]] | None: + ) -> tuple[list[str], list[Type], dict[str, Expression], list[Statement]] | None: """Parse and validate fields in named tuple class definition. - Return a three tuple: + Return a four tuple: * field names * field types * field default values + * valid statements or None, if any of the types are not ready. """ if self.options.python_version < (3, 6) and not is_stub_file: self.fail("NamedTuple class syntax is only supported in Python 3.6", defn) - return [], [], {} + return [], [], {}, [] if len(defn.base_type_exprs) > 1: self.fail("NamedTuple should be a single base", defn) items: list[str] = [] types: list[Type] = [] default_items: dict[str, Expression] = {} + statements: list[Statement] = [] for stmt in defn.defs.body: + statements.append(stmt) if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). if isinstance(stmt, PassStmt) or ( @@ -160,9 +165,13 @@ def check_namedtuple_classdef( # And docstrings. if isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, StrExpr): continue + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + statements.pop() + defn.removed_statements.append(stmt) self.fail(NAMEDTUP_CLASS_ERROR, stmt) else: # Append name and type in this case... @@ -176,8 +185,9 @@ def check_namedtuple_classdef( # it would be inconsistent with type aliases. analyzed = self.api.anal_type( stmt.type, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) if analyzed is None: # Something is incomplete. We need to defer this named tuple. @@ -198,7 +208,7 @@ def check_namedtuple_classdef( ) else: default_items[name] = stmt.rvalue - return items, types, default_items + return items, types, default_items, statements def check_namedtuple( self, node: Expression, var_name: str | None, is_func_scope: bool @@ -321,11 +331,12 @@ def parse_namedtuple_args( ) -> None | (tuple[list[str], list[Type], list[Expression], str, list[TypeVarLikeType], bool]): """Parse a namedtuple() call into data needed to construct a type. - Returns a 5-tuple: + Returns a 6-tuple: - List of argument names - List of argument types - List of default values - First argument of namedtuple + - All typevars found in the field definition - Whether all types are ready. Return None if the definition didn't typecheck. @@ -442,8 +453,9 @@ def parse_namedtuple_fields_with_types( # We never allow recursive types at function scope. 
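# --- Illustrative sketch, not part of the patch ---------------------------------
# check_namedtuple_classdef() now also returns the statements that legitimately
# belong to the class body (invalid ones go to defn.removed_statements), and
# placeholders are allowed unless recursive aliases are explicitly disabled, so
# a class-syntax NamedTuple can refer to itself. Self as an item type is newly
# rejected (prohibit_self_type="NamedTuple item type").

from typing import NamedTuple, Optional

class Tree(NamedTuple):
    """Docstrings and annotated fields are kept in the body."""
    value: int
    left: Optional["Tree"] = None   # recursive item type, supported by default
    right: Optional["Tree"] = None

t = Tree(1, Tree(2), None)
# ----------------------------------------------------------------------------------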
analyzed = self.api.anal_type( type, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="NamedTuple item type", ) # Workaround #4987 and avoid introducing a bogus UnboundType if isinstance(analyzed, UnboundType): @@ -469,13 +481,9 @@ def build_namedtuple_typeinfo( strtype = self.api.named_type("builtins.str") implicit_any = AnyType(TypeOfAny.special_form) basetuple_type = self.api.named_type("builtins.tuple", [implicit_any]) - dictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + dictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) # Actual signature should return OrderedDict[str, Union[types]] - ordereddictype = self.api.named_type_or_none( - "builtins.dict", [strtype, implicit_any] - ) or self.api.named_type("builtins.object") + ordereddictype = self.api.named_type("builtins.dict", [strtype, implicit_any]) fallback = self.api.named_type("builtins.tuple", [implicit_any]) # Note: actual signature should accept an invariant version of Iterable[UnionType[types]]. # but it can't be expressed. 'new' and 'len' should be callable types. @@ -489,7 +497,9 @@ def build_namedtuple_typeinfo( info.is_named_tuple = True tuple_base = TupleType(types, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "NamedTuple item", info, force_progress=tuple_base != info.tuple_type + ) info.update_tuple_type(tuple_base) info.line = line # For use by mypyc. diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index b571ed538e09..cb1055a62186 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -203,7 +203,7 @@ def check_newtype_args( self.api.anal_type( unanalyzed_type, report_invalid_types=False, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), ) ) @@ -249,10 +249,16 @@ def build_newtype_typeinfo( init_func = FuncDef("__init__", args, Block([]), typ=signature) init_func.info = info init_func._fullname = info.fullname + ".__init__" + if not existing_info: + updated = True + else: + previous_sym = info.names["__init__"].node + assert isinstance(previous_sym, FuncDef) + updated = old_type != previous_sym.arguments[1].variable.type info.names["__init__"] = SymbolTableNode(MDEF, init_func) - if has_placeholder(old_type) or info.tuple_type and has_placeholder(info.tuple_type): - self.api.defer(force_progress=True) + if has_placeholder(old_type): + self.api.process_placeholder(None, "NewType base", info, force_progress=updated) return info # Helpers diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index d9ded032591b..03efbe6ca1b8 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -3,25 +3,32 @@ from __future__ import annotations from abc import abstractmethod -from typing import Callable -from typing_extensions import Final, Protocol +from typing import Callable, overload +from typing_extensions import Final, Literal, Protocol from mypy_extensions import trait from mypy import join -from mypy.errorcodes import ErrorCode +from mypy.errorcodes import LITERAL_REQ, ErrorCode from mypy.nodes import ( + CallExpr, + ClassDef, Context, + DataclassTransformSpec, + Decorator, Expression, FuncDef, Node, + OverloadedFuncDef, + RefExpr, SymbolNode, 
SymbolTable, SymbolTableNode, TypeInfo, ) +from mypy.plugin import SemanticAnalyzerPluginInterface from mypy.tvar_scope import TypeVarLikeScope -from mypy.type_visitor import TypeQuery +from mypy.type_visitor import ANY_STRATEGY, BoolTypeQuery from mypy.types import ( TPDICT_FB_NAMES, FunctionLike, @@ -38,6 +45,11 @@ get_proper_type, ) +# Subclasses can override these Var attributes with incompatible types. This can also be +# set for individual attributes using 'allow_incompatible_override' of Var. +ALLOW_INCOMPATIBLE_OVERRIDE: Final = ("__slots__", "__deletable__", "__match_args__") + + # Priorities for ordering of patches within the "patch" phase of semantic analysis # (after the main pass): @@ -82,6 +94,10 @@ def fail( def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: raise NotImplementedError + @abstractmethod + def incomplete_feature_enabled(self, feature: str, ctx: Context) -> bool: + raise NotImplementedError + @abstractmethod def record_incomplete_ref(self) -> None: raise NotImplementedError @@ -115,6 +131,11 @@ def is_stub_file(self) -> bool: def is_func_scope(self) -> bool: raise NotImplementedError + @property + @abstractmethod + def type(self) -> TypeInfo | None: + raise NotImplementedError + @trait class SemanticAnalyzerInterface(SemanticAnalyzerCoreInterface): @@ -158,6 +179,7 @@ def anal_type( allow_required: bool = False, allow_placeholder: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, ) -> Type | None: raise NotImplementedError @@ -222,6 +244,12 @@ def qualified_name(self, n: str) -> str: def is_typeshed_stub_file(self) -> bool: raise NotImplementedError + @abstractmethod + def process_placeholder( + self, name: str | None, kind: str, ctx: Context, force_progress: bool = False + ) -> None: + raise NotImplementedError + def set_callable_name(sig: Type, fdef: FuncDef) -> ProperType: sig = get_proper_type(sig) @@ -309,9 +337,9 @@ def paramspec_kwargs( ) -class HasPlaceholders(TypeQuery[bool]): +class HasPlaceholders(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_placeholder_type(self, t: PlaceholderType) -> bool: return True @@ -320,3 +348,114 @@ def visit_placeholder_type(self, t: PlaceholderType) -> bool: def has_placeholder(typ: Type) -> bool: """Check if a type contains any placeholder types (recursively).""" return typ.accept(HasPlaceholders()) + + +def find_dataclass_transform_spec(node: Node | None) -> DataclassTransformSpec | None: + """ + Find the dataclass transform spec for the given node, if any exists. + + Per PEP 681 (https://peps.python.org/pep-0681/#the-dataclass-transform-decorator), dataclass + transforms can be specified in multiple ways, including decorator functions and + metaclasses/base classes. This function resolves the spec from any of these variants. + """ + + # The spec only lives on the function/class definition itself, so we need to unwrap down to that + # point + if isinstance(node, CallExpr): + # Like dataclasses.dataclass, transform-based decorators can be applied either with or + # without parameters; ie, both of these forms are accepted: + # + # @typing.dataclass_transform + # class Foo: ... + # @typing.dataclass_transform(eq=True, order=True, ...) + # class Bar: ... + # + # We need to unwrap the call for the second variant. 
+ node = node.callee + + if isinstance(node, RefExpr): + node = node.node + + if isinstance(node, Decorator): + # typing.dataclass_transform usage must always result in a Decorator; it always uses the + # `@dataclass_transform(...)` syntax and never `@dataclass_transform` + node = node.func + + if isinstance(node, OverloadedFuncDef): + # The dataclass_transform decorator may be attached to any single overload, so we must + # search them all. + # Note that using more than one decorator is undefined behavior, so we can just take the + # first that we find. + for candidate in node.items: + spec = find_dataclass_transform_spec(candidate) + if spec is not None: + return spec + return find_dataclass_transform_spec(node.impl) + + # For functions, we can directly consult the AST field for the spec + if isinstance(node, FuncDef): + return node.dataclass_transform_spec + + if isinstance(node, ClassDef): + node = node.info + if isinstance(node, TypeInfo): + # Search all parent classes to see if any are decorated with `typing.dataclass_transform` + for base in node.mro[1:]: + if base.dataclass_transform_spec is not None: + return base.dataclass_transform_spec + + # Check if there is a metaclass that is decorated with `typing.dataclass_transform` + # + # Note that PEP 681 only discusses using a metaclass that is directly decorated with + # `typing.dataclass_transform`; subclasses thereof should be treated with dataclass + # semantics rather than as transforms: + # + # > If dataclass_transform is applied to a class, dataclass-like semantics will be assumed + # > for any class that directly or indirectly derives from the decorated class or uses the + # > decorated class as a metaclass. + # + # The wording doesn't make this entirely explicit, but Pyright (the reference + # implementation for this PEP) only handles directly-decorated metaclasses. + metaclass_type = node.metaclass_type + if metaclass_type is not None and metaclass_type.type.dataclass_transform_spec is not None: + return metaclass_type.type.dataclass_transform_spec + + return None + + +# Never returns `None` if a default is given +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: Literal[True] | Literal[False], +) -> bool: + ... + + +@overload +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: None = None, +) -> bool | None: + ... 
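# --- Illustrative sketch, not part of the patch -----------------------------------
# Why two overloads: a Literal[True]/Literal[False] default lets callers that pass
# an explicit default rely on a plain bool, while the default-less call keeps the
# bool | None result. The same trick in isolation, with hypothetical names:

from __future__ import annotations
from typing import Literal, overload

@overload
def parse_flag(raw: object, default: Literal[True, False]) -> bool: ...
@overload
def parse_flag(raw: object, default: None = None) -> bool | None: ...
def parse_flag(raw: object, default: bool | None = None) -> bool | None:
    # Accept only literal True/False values, otherwise fall back to the default.
    return raw if isinstance(raw, bool) else default

assert parse_flag(True) is True
assert parse_flag("yes", default=False) is False
# -----------------------------------------------------------------------------------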
+ + +def require_bool_literal_argument( + api: SemanticAnalyzerInterface | SemanticAnalyzerPluginInterface, + expression: Expression, + name: str, + default: bool | None = None, +) -> bool | None: + """Attempt to interpret an expression as a boolean literal, and fail analysis if we can't.""" + value = api.parse_bool(expression) + if value is None: + api.fail( + f'"{name}" argument must be a True or False literal', expression, code=LITERAL_REQ + ) + return default + + return value diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index f988014cdd02..b9965236c379 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -7,23 +7,27 @@ from __future__ import annotations +from typing import Sequence + from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode from mypy.errors import Errors from mypy.messages import format_type from mypy.mixedtraverser import MixedTraverserVisitor -from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile, TypeInfo +from mypy.nodes import Block, ClassDef, Context, FakeInfo, FuncItem, MypyFile from mypy.options import Options from mypy.scope import Scope from mypy.subtypes import is_same_type, is_subtype from mypy.types import ( AnyType, Instance, + Parameters, ParamSpecType, TupleType, Type, TypeAliasType, TypeOfAny, + TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, @@ -35,6 +39,7 @@ class TypeArgumentAnalyzer(MixedTraverserVisitor): def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> None: + super().__init__() self.errors = errors self.options = options self.is_typeshed_file = is_typeshed_file @@ -46,7 +51,7 @@ def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> self.seen_aliases: set[TypeAliasType] = set() def visit_mypy_file(self, o: MypyFile) -> None: - self.errors.set_file(o.path, o.fullname, scope=self.scope) + self.errors.set_file(o.path, o.fullname, scope=self.scope, options=self.options) with self.scope.module_scope(o.fullname): super().visit_mypy_file(o) @@ -77,7 +82,12 @@ def visit_type_alias_type(self, t: TypeAliasType) -> None: # correct aliases. if t.alias and len(t.args) != len(t.alias.alias_tvars): t.args = [AnyType(TypeOfAny.from_error) for _ in t.alias.alias_tvars] - get_proper_type(t).accept(self) + assert t.alias is not None, f"Unfixed type alias {t.type_ref}" + is_error = self.validate_args(t.alias.name, t.args, t.alias.alias_tvars, t) + if not is_error: + # If there was already an error for the alias itself, there is no point in checking + # the expansion, most likely it will result in the same kind of error. + get_proper_type(t).accept(self) def visit_instance(self, t: Instance) -> None: # Type argument counts were checked in the main semantic analyzer pass. 
We assume @@ -85,36 +95,67 @@ def visit_instance(self, t: Instance) -> None: info = t.type if isinstance(info, FakeInfo): return # https://github.com/python/mypy/issues/11079 - for (i, arg), tvar in zip(enumerate(t.args), info.defn.type_vars): + self.validate_args(info.name, t.args, info.defn.type_vars, t) + super().visit_instance(t) + + def validate_args( + self, name: str, args: Sequence[Type], type_vars: list[TypeVarLikeType], ctx: Context + ) -> bool: + is_error = False + for (i, arg), tvar in zip(enumerate(args), type_vars): if isinstance(tvar, TypeVarType): if isinstance(arg, ParamSpecType): # TODO: Better message - self.fail(f'Invalid location for ParamSpec "{arg.name}"', t) + is_error = True + self.fail(f'Invalid location for ParamSpec "{arg.name}"', ctx) + self.note( + "You can use ParamSpec as the first argument to Callable, e.g., " + "'Callable[{}, int]'".format(arg.name), + ctx, + ) continue if tvar.values: if isinstance(arg, TypeVarType): + if self.in_type_alias_expr: + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. + continue arg_values = arg.values if not arg_values: + is_error = True self.fail( - message_registry.INVALID_TYPEVAR_AS_TYPEARG.format( - arg.name, info.name - ), - t, + message_registry.INVALID_TYPEVAR_AS_TYPEARG.format(arg.name, name), + ctx, code=codes.TYPE_VAR, ) continue else: arg_values = [arg] - self.check_type_var_values(info, arg_values, tvar.name, tvar.values, i + 1, t) + if self.check_type_var_values(name, arg_values, tvar.name, tvar.values, ctx): + is_error = True if not is_subtype(arg, tvar.upper_bound): + if self.in_type_alias_expr and isinstance(arg, TypeVarType): + # Type aliases are allowed to use unconstrained type variables + # error will be checked at substitution point. + continue + is_error = True self.fail( message_registry.INVALID_TYPEVAR_ARG_BOUND.format( - format_type(arg), info.name, format_type(tvar.upper_bound) + format_type(arg), name, format_type(tvar.upper_bound) ), - t, + ctx, code=codes.TYPE_VAR, ) - super().visit_instance(t) + elif isinstance(tvar, ParamSpecType): + if not isinstance( + get_proper_type(arg), (ParamSpecType, Parameters, AnyType, UnboundType) + ): + self.fail( + "Can only replace ParamSpec with a parameter types list or" + f" another ParamSpec, got {format_type(arg)}", + ctx, + ) + return is_error def visit_unpack_type(self, typ: UnpackType) -> None: proper_type = get_proper_type(typ.type) @@ -132,28 +173,25 @@ def visit_unpack_type(self, typ: UnpackType) -> None: self.fail(message_registry.INVALID_UNPACK.format(proper_type), typ) def check_type_var_values( - self, - type: TypeInfo, - actuals: list[Type], - arg_name: str, - valids: list[Type], - arg_number: int, - context: Context, - ) -> None: + self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context + ) -> bool: + is_error = False for actual in get_proper_types(actuals): - # TODO: bind type variables in class bases/alias targets - # so we can safely check this, currently we miss some errors. + # We skip UnboundType here, since they may appear in defn.bases, + # the error will be caught when visiting info.bases, that have bound type + # variables. 
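# --- Illustrative sketch, not part of the patch -------------------------------------
# The kinds of misuse validate_args() now reports with clearer messages
# (ParamSpec requires Python 3.10+ from typing at runtime):

from typing import Callable, Generic, ParamSpec, TypeVar

AnyStr = TypeVar("AnyStr", str, bytes)   # value-constrained type variable
P = ParamSpec("P")

class Matcher(Generic[AnyStr]):
    ...

ok: Matcher[bytes]   # satisfies one of the constraints
bad: Matcher[int]    # error, roughly: Value of type variable "AnyStr" cannot be "int"

def timed(fn: Callable[P, int]) -> Callable[P, int]:
    # A valid location for a ParamSpec; using P as a plain type argument now also
    # gets a note pointing at this Callable form.
    return fn
# --------------------------------------------------------------------------------------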
if not isinstance(actual, (AnyType, UnboundType)) and not any( is_same_type(actual, value) for value in valids ): + is_error = True if len(actuals) > 1 or not isinstance(actual, Instance): self.fail( - message_registry.INVALID_TYPEVAR_ARG_VALUE.format(type.name), + message_registry.INVALID_TYPEVAR_ARG_VALUE.format(name), context, code=codes.TYPE_VAR, ) else: - class_name = f'"{type.name}"' + class_name = f'"{name}"' actual_type_name = f'"{actual.type.name}"' self.fail( message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( @@ -162,6 +200,10 @@ def check_type_var_values( context, code=codes.TYPE_VAR, ) + return is_error def fail(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: - self.errors.report(context.get_line(), context.get_column(), msg, code=code) + self.errors.report(context.line, context.column, msg, code=code) + + def note(self, msg: str, context: Context, *, code: ErrorCode | None = None) -> None: + self.errors.report(context.line, context.column, msg, severity="note", code=code) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 77e83e53f686..acb93edb7d2d 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -4,7 +4,7 @@ from typing_extensions import Final -from mypy import errorcodes as codes +from mypy import errorcodes as codes, message_registry from mypy.errorcodes import ErrorCode from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type from mypy.messages import MessageBuilder @@ -23,6 +23,7 @@ NameExpr, PassStmt, RefExpr, + Statement, StrExpr, TempNode, TupleExpr, @@ -30,7 +31,11 @@ TypeInfo, ) from mypy.options import Options -from mypy.semanal_shared import SemanticAnalyzerInterface, has_placeholder +from mypy.semanal_shared import ( + SemanticAnalyzerInterface, + has_placeholder, + require_bool_literal_argument, +) from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type from mypy.types import ( TPDICT_NAMES, @@ -79,6 +84,9 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N self.api.accept(base_expr) if base_expr.fullname in TPDICT_NAMES or self.is_typeddict(base_expr): possible = True + if isinstance(base_expr.node, TypeInfo) and base_expr.node.is_final: + err = message_registry.CANNOT_INHERIT_FROM_FINAL + self.fail(err.format(base_expr.node.name).value, defn, code=err.code) if not possible: return False, None existing_info = None @@ -90,7 +98,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N and defn.base_type_exprs[0].fullname in TPDICT_NAMES ): # Building a new TypedDict - fields, types, required_keys = self.analyze_typeddict_classdef_fields(defn) + fields, types, statements, required_keys = self.analyze_typeddict_classdef_fields(defn) if fields is None: return True, None # Defer info = self.build_typeddict_typeinfo( @@ -99,6 +107,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = statements return True, info # Extending/merging existing TypedDicts @@ -136,7 +145,12 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N # Iterate over bases in reverse order so that leftmost base class' keys take precedence for base in reversed(typeddict_bases): self.add_keys_and_types_from_base(base, keys, types, required_keys, defn) - new_keys, new_types, new_required_keys = self.analyze_typeddict_classdef_fields(defn, keys) + ( 
+ new_keys, + new_types, + new_statements, + new_required_keys, + ) = self.analyze_typeddict_classdef_fields(defn, keys) if new_keys is None: return True, None # Defer keys.extend(new_keys) @@ -148,6 +162,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> tuple[bool, TypeInfo | N defn.analyzed = TypedDictExpr(info) defn.analyzed.line = defn.line defn.analyzed.column = defn.column + defn.defs.body = new_statements return True, info def add_keys_and_types_from_base( @@ -178,7 +193,7 @@ def add_keys_and_types_from_base( valid_items = base_items.copy() # Always fix invalid bases to avoid crashes. - tvars = info.type_vars + tvars = info.defn.type_vars if len(base_args) != len(tvars): any_kind = TypeOfAny.from_omitted_generics if base_args: @@ -215,7 +230,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: analyzed = self.api.anal_type( type, allow_required=True, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), ) if analyzed is None: @@ -224,7 +239,7 @@ def analyze_base_args(self, base: IndexExpr, ctx: Context) -> list[Type] | None: return base_args def map_items_to_base( - self, valid_items: dict[str, Type], tvars: list[str], base_args: list[Type] + self, valid_items: dict[str, Type], tvars: list[TypeVarLikeType], base_args: list[Type] ) -> dict[str, Type]: """Map item types to how they would look in their base with type arguments applied. @@ -247,7 +262,7 @@ def map_items_to_base( def analyze_typeddict_classdef_fields( self, defn: ClassDef, oldfields: list[str] | None = None - ) -> tuple[list[str] | None, list[Type], set[str]]: + ) -> tuple[list[str] | None, list[Type], list[Statement], set[str]]: """Analyze fields defined in a TypedDict class definition. This doesn't consider inherited fields (if any). Also consider totality, @@ -256,20 +271,27 @@ def analyze_typeddict_classdef_fields( Return tuple with these items: * List of keys (or None if found an incomplete reference --> deferral) * List of types for each key + * List of statements from defn.defs.body that are legally allowed to be a + part of a TypedDict definition * Set of required keys """ fields: list[str] = [] types: list[Type] = [] + statements: list[Statement] = [] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): - # Still allow pass or ... (for empty TypedDict's). - if not isinstance(stmt, PassStmt) and not ( + # Still allow pass or ... (for empty TypedDict's) and docstrings + if isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, (EllipsisExpr, StrExpr)) ): + statements.append(stmt) + else: + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) elif len(stmt.lvalues) > 1 or not isinstance(stmt.lvalues[0], NameExpr): # An assignment, but an invalid one. + defn.removed_statements.append(stmt) self.fail(TPDICT_CLASS_ERROR, stmt) else: name = stmt.lvalues[0].name @@ -278,21 +300,23 @@ def analyze_typeddict_classdef_fields( if name in fields: self.fail(f'Duplicate TypedDict key "{name}"', stmt) continue - # Append name and type in this case... + # Append stmt, name, and type in this case... 
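# --- Illustrative sketch, not part of the patch ---------------------------------------
# analyze_typeddict_classdef_fields() now also returns the statements that are legal
# in a TypedDict body (annotated fields, pass/..., docstrings), and "total" is
# validated through require_bool_literal_argument():

from typing import TypedDict

class Movie(TypedDict, total=False):
    """Docstrings are allowed and kept in the body."""
    title: str
    year: int

m: Movie = {"title": "Blade Runner"}   # all keys optional with total=False
# -----------------------------------------------------------------------------------------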
fields.append(name) + statements.append(stmt) if stmt.type is None: types.append(AnyType(TypeOfAny.unannotated)) else: analyzed = self.api.anal_type( stmt.type, allow_required=True, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: - return None, [], set() # Need to defer + return None, [], [], set() # Need to defer types.append(analyzed) - # ...despite possible minor failures that allow further analyzis. + # ...despite possible minor failures that allow further analysis. if stmt.type is None or hasattr(stmt, "new_syntax") and not stmt.new_syntax: self.fail(TPDICT_CLASS_ERROR, stmt) elif not isinstance(stmt.rvalue, TempNode): @@ -300,10 +324,7 @@ def analyze_typeddict_classdef_fields( self.fail("Right hand side values are not supported in TypedDict", stmt) total: bool | None = True if "total" in defn.keywords: - total = self.api.parse_bool(defn.keywords["total"]) - if total is None: - self.fail('Value of "total" must be True or False', defn) - total = True + total = require_bool_literal_argument(self.api, defn.keywords["total"], "total", True) required_keys = { field for (field, t) in zip(fields, types) @@ -314,7 +335,7 @@ def analyze_typeddict_classdef_fields( t.item if isinstance(t, RequiredType) else t for t in types ] - return fields, types, required_keys + return fields, types, statements, required_keys def check_typeddict( self, node: Expression, var_name: str | None, is_func_scope: bool @@ -416,11 +437,9 @@ def parse_typeddict_args( ) total: bool | None = True if len(args) == 3: - total = self.api.parse_bool(call.args[2]) + total = require_bool_literal_argument(self.api, call.args[2], "total") if total is None: - return self.fail_typeddict_arg( - 'TypedDict() "total" argument must be True or False', call - ) + return "", [], [], True, [], False dictexpr = args[1] tvar_defs = self.api.get_and_bind_all_tvars([t for k, t in dictexpr.items]) res = self.parse_typeddict_fields_with_types(dictexpr.items, call) @@ -481,8 +500,9 @@ def parse_typeddict_fields_with_types( analyzed = self.api.anal_type( type, allow_required=True, - allow_placeholder=self.options.enable_recursive_aliases + allow_placeholder=not self.options.disable_recursive_aliases and not self.api.is_func_scope(), + prohibit_self_type="TypedDict item type", ) if analyzed is None: return None @@ -514,7 +534,9 @@ def build_typeddict_typeinfo( info = existing_info or self.api.basic_new_typeinfo(name, fallback, line) typeddict_type = TypedDictType(dict(zip(items, types)), required_keys, fallback) if info.special_alias and has_placeholder(info.special_alias.target): - self.api.defer(force_progress=True) + self.api.process_placeholder( + None, "TypedDict item", info, force_progress=typeddict_type != info.typeddict_type + ) info.update_typeddict_type(typeddict_type) return info diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index e913188df02f..c942a5eb3b0f 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -52,13 +52,15 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' from __future__ import annotations -from typing import Sequence, Tuple +from typing import Sequence, Tuple, Union, cast from typing_extensions import TypeAlias as _TypeAlias +from mypy.expandtype import expand_type from mypy.nodes import ( UNBOUND_IMPORTED, Decorator, FuncBase, + FuncDef, FuncItem, MypyFile, OverloadedFuncDef, @@ -68,8 +70,10 
@@ class level -- these are handled at attribute level (say, 'mod.Cls.method' TypeAlias, TypeInfo, TypeVarExpr, + TypeVarTupleExpr, Var, ) +from mypy.semanal_shared import find_dataclass_transform_spec from mypy.types import ( AnyType, CallableType, @@ -87,6 +91,8 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' TypeAliasType, TypedDictType, TypeType, + TypeVarId, + TypeVarLikeType, TypeVarTupleType, TypeVarType, TypeVisitor, @@ -104,11 +110,17 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' # snapshots are immutable). # # For example, the snapshot of the 'int' type is ('Instance', 'builtins.int', ()). -SnapshotItem: _TypeAlias = Tuple[object, ...] + +# Type snapshots are strict, they must be hashable and ordered (e.g. for Unions). +Primitive: _TypeAlias = Union[str, float, int, bool] # float is for Literal[3.14] support. +SnapshotItem: _TypeAlias = Tuple[Union[Primitive, "SnapshotItem"], ...] + +# Symbol snapshots can be more lenient. +SymbolSnapshot: _TypeAlias = Tuple[object, ...] def compare_symbol_table_snapshots( - name_prefix: str, snapshot1: dict[str, SnapshotItem], snapshot2: dict[str, SnapshotItem] + name_prefix: str, snapshot1: dict[str, SymbolSnapshot], snapshot2: dict[str, SymbolSnapshot] ) -> set[str]: """Return names that are different in two snapshots of a symbol table. @@ -150,7 +162,7 @@ def compare_symbol_table_snapshots( return triggers -def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SnapshotItem]: +def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, SymbolSnapshot]: """Create a snapshot description that represents the state of a symbol table. The snapshot has a representation based on nested tuples and dicts @@ -160,7 +172,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna things defined in other modules are represented just by the names of the targets. """ - result: dict[str, SnapshotItem] = {} + result: dict[str, SymbolSnapshot] = {} for name, symbol in table.items(): node = symbol.node # TODO: cross_ref? @@ -182,13 +194,15 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna elif isinstance(node, TypeAlias): result[name] = ( "TypeAlias", - node.alias_tvars, + snapshot_types(node.alias_tvars), node.normalized, node.no_args, snapshot_optional_type(node.target), ) elif isinstance(node, ParamSpecExpr): result[name] = ("ParamSpec", node.variance, snapshot_type(node.upper_bound)) + elif isinstance(node, TypeVarTupleExpr): + result[name] = ("TypeVarTuple", node.variance, snapshot_type(node.upper_bound)) else: assert symbol.kind != UNBOUND_IMPORTED if node and get_prefix(node.fullname) != name_prefix: @@ -199,7 +213,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> dict[str, Sna return result -def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> tuple[object, ...]: +def snapshot_definition(node: SymbolNode | None, common: SymbolSnapshot) -> SymbolSnapshot: """Create a snapshot description of a symbol table node. The representation is nested tuples and dicts. 
Only externally @@ -211,6 +225,13 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> signature = snapshot_type(node.type) else: signature = snapshot_untyped_signature(node) + impl: FuncDef | None = None + if isinstance(node, FuncDef): + impl = node + elif isinstance(node, OverloadedFuncDef) and node.impl: + impl = node.impl.func if isinstance(node.impl, Decorator) else node.impl + is_trivial_body = impl.is_trivial_body if impl else False + dataclass_transform_spec = find_dataclass_transform_spec(node) return ( "Func", common, @@ -219,6 +240,8 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> node.is_class, node.is_static, signature, + is_trivial_body, + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, ) elif isinstance(node, Var): return ("Var", common, snapshot_optional_type(node.type), node.is_final) @@ -236,11 +259,16 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> snapshot_definition(node.func, common), ) elif isinstance(node, TypeInfo): + dataclass_transform_spec = node.dataclass_transform_spec + if dataclass_transform_spec is None: + dataclass_transform_spec = find_dataclass_transform_spec(node) + attrs = ( node.is_abstract, node.is_enum, node.is_protocol, node.fallback_to_any, + node.meta_fallback_to_any, node.is_named_tuple, node.is_newtype, # We need this to e.g. trigger metaclass calculation in subclasses. @@ -259,6 +287,7 @@ def snapshot_definition(node: SymbolNode | None, common: tuple[object, ...]) -> tuple(snapshot_type(tdef) for tdef in node.defn.type_vars), [snapshot_type(base) for base in node.bases], [snapshot_type(p) for p in node._promote], + dataclass_transform_spec.serialize() if dataclass_transform_spec is not None else None, ) prefix = node.fullname symbol_table = snapshot_symbol_table(prefix, node.names) @@ -275,11 +304,11 @@ def snapshot_type(typ: Type) -> SnapshotItem: return typ.accept(SnapshotTypeVisitor()) -def snapshot_optional_type(typ: Type | None) -> SnapshotItem | None: +def snapshot_optional_type(typ: Type | None) -> SnapshotItem: if typ: return snapshot_type(typ) else: - return None + return ("",) def snapshot_types(types: Sequence[Type]) -> SnapshotItem: @@ -381,21 +410,40 @@ def visit_parameters(self, typ: Parameters) -> SnapshotItem: "Parameters", snapshot_types(typ.arg_types), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), ) def visit_callable_type(self, typ: CallableType) -> SnapshotItem: - # FIX generics + if typ.is_generic(): + typ = self.normalize_callable_variables(typ) return ( "CallableType", snapshot_types(typ.arg_types), snapshot_type(typ.ret_type), tuple(encode_optional_str(name) for name in typ.arg_names), - tuple(typ.arg_kinds), + tuple(k.value for k in typ.arg_kinds), typ.is_type_obj(), typ.is_ellipsis_args, + snapshot_types(typ.variables), ) + def normalize_callable_variables(self, typ: CallableType) -> CallableType: + """Normalize all type variable ids to run from -1 to -len(variables).""" + tvs = [] + tvmap: dict[TypeVarId, Type] = {} + for i, v in enumerate(typ.variables): + tid = TypeVarId(-1 - i) + if isinstance(v, TypeVarType): + tv: TypeVarLikeType = v.copy_modified(id=tid) + elif isinstance(v, TypeVarTupleType): + tv = v.copy_modified(id=tid) + else: + assert isinstance(v, ParamSpecType) + tv = v.copy_modified(id=tid) + tvs.append(tv) + tvmap[v.id] = tv + return cast(CallableType, expand_type(typ, 
tvmap)).copy_modified(variables=tvs) + def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: return ("TupleType", snapshot_types(typ.items)) @@ -430,7 +478,7 @@ def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem: return ("TypeAliasType", typ.alias.fullname, snapshot_types(typ.args)) -def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[object, ...]: +def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> SymbolSnapshot: """Create a snapshot of the signature of a function that has no explicit signature. If the arguments to a function without signature change, it must be @@ -442,7 +490,7 @@ def snapshot_untyped_signature(func: OverloadedFuncDef | FuncItem) -> tuple[obje if isinstance(func, FuncItem): return (tuple(func.arg_names), tuple(func.arg_kinds)) else: - result = [] + result: list[SymbolSnapshot] = [] for item in func.items: if isinstance(item, Decorator): if item.var.type: diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 7a6b247c84f8..1ec6d572a82c 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -95,7 +95,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, SyntheticTypeVisitor, TupleType, Type, @@ -110,7 +109,7 @@ UnionType, UnpackType, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import get_prefix, replace_object_state @@ -213,8 +212,8 @@ def visit_mypy_file(self, node: MypyFile) -> None: super().visit_mypy_file(node) def visit_block(self, node: Block) -> None: - super().visit_block(node) node.body = self.replace_statements(node.body) + super().visit_block(node) def visit_func_def(self, node: FuncDef) -> None: node = self.fixup(node) @@ -331,6 +330,8 @@ def visit_var(self, node: Var) -> None: def visit_type_alias(self, node: TypeAlias) -> None: self.fixup_type(node.target) + for v in node.alias_tvars: + self.fixup_type(v) super().visit_type_alias(node) # Helpers @@ -358,7 +359,7 @@ def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: # The subclass relationships may change, so reset all caches relevant to the # old MRO. new = cast(TypeInfo, self.replacements[node]) - TypeState.reset_all_subtype_caches_for(new) + type_state.reset_all_subtype_caches_for(new) return self.fixup(node) def fixup_type(self, typ: Type | None) -> None: @@ -517,9 +518,6 @@ def visit_callable_argument(self, typ: CallableArgument) -> None: def visit_ellipsis_type(self, typ: EllipsisType) -> None: pass - def visit_star_type(self, typ: StarType) -> None: - typ.type.accept(self) - def visit_uninhabited_type(self, typ: UninhabitedType) -> None: pass diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 1bfd820efb21..05af6a3d53a1 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -54,6 +54,7 @@ MypyFile, NameExpr, Node, + OpExpr, OverloadedFuncDef, RefExpr, StarExpr, @@ -65,7 +66,7 @@ ) from mypy.traverser import TraverserVisitor from mypy.types import CallableType -from mypy.typestate import TypeState +from mypy.typestate import type_state SavedAttributes: _TypeAlias = Dict[Tuple[ClassDef, str], SymbolTableNode] @@ -140,7 +141,9 @@ def visit_class_def(self, node: ClassDef) -> None: ] with self.enter_class(node.info): super().visit_class_def(node) - TypeState.reset_subtype_caches_for(node.info) + node.defs.body.extend(node.removed_statements) + node.removed_statements = [] + type_state.reset_subtype_caches_for(node.info) # Kill the TypeInfo, since there is none before semantic analysis. 
node.info = CLASSDEF_NO_INFO node.analyzed = None @@ -220,10 +223,14 @@ def visit_index_expr(self, node: IndexExpr) -> None: node.analyzed = None # May have been an alias or type application. super().visit_index_expr(node) + def visit_op_expr(self, node: OpExpr) -> None: + node.analyzed = None # May have been an alias + super().visit_op_expr(node) + def strip_ref_expr(self, node: RefExpr) -> None: node.kind = None node.node = None - node.fullname = None + node.fullname = "" node.is_new_def = False node.is_inferred_def = False diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 7cb4aeda7534..50b66b70b8aa 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -81,7 +81,8 @@ class 'mod.Cls'. This can also refer to an attribute inherited from a from __future__ import annotations -from typing import DefaultDict, List +from collections import defaultdict +from typing import List from mypy.nodes import ( GDEF, @@ -171,7 +172,7 @@ class 'mod.Cls'. This can also refer to an attribute inherited from a UnpackType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import correct_relative_import @@ -220,7 +221,7 @@ def __init__( self, type_map: dict[Expression, Type], python_version: tuple[int, int], - alias_deps: DefaultDict[str, set[str]], + alias_deps: defaultdict[str, set[str]], options: Options | None = None, ) -> None: self.scope = Scope() @@ -288,13 +289,9 @@ def visit_decorator(self, o: Decorator) -> None: # all call sites, making them all `Any`. for d in o.decorators: tname: str | None = None - if isinstance(d, RefExpr) and d.fullname is not None: + if isinstance(d, RefExpr) and d.fullname: tname = d.fullname - if ( - isinstance(d, CallExpr) - and isinstance(d.callee, RefExpr) - and d.callee.fullname is not None - ): + if isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and d.callee.fullname: tname = d.callee.fullname if tname is not None: self.add_dependency(make_trigger(tname), make_trigger(o.func.fullname)) @@ -343,7 +340,7 @@ def process_type_info(self, info: TypeInfo) -> None: self.add_dependency( make_wildcard_trigger(base_info.fullname), target=make_trigger(target) ) - # More protocol dependencies are collected in TypeState._snapshot_protocol_deps + # More protocol dependencies are collected in type_state._snapshot_protocol_deps # after a full run or update is finished. 
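# --- Illustrative sketch, not part of the patch -----------------------------------------
# The new visit_op_expr() strip step matters because `X | Y` at module level can be
# analyzed into a type alias (PEP 604), and fine-grained updates must reset that so the
# expression is re-analyzed from scratch. Requires Python 3.10+ at runtime:

IntOrStr = int | str   # stored as an OpExpr whose .analyzed may be a type alias

def describe(value: IntOrStr) -> str:
    return f"{value!r} ({type(value).__name__})"

print(describe(3), describe("three"))
# -------------------------------------------------------------------------------------------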
self.add_type_alias_deps(self.scope.current_target()) @@ -499,7 +496,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if ( isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) - and rvalue.callee.fullname is not None + and rvalue.callee.fullname ): fname: str | None = None if isinstance(rvalue.callee.node, TypeInfo): @@ -509,7 +506,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: fname = init.node.fullname else: fname = rvalue.callee.fullname - if fname is None: + if not fname: return for lv in o.lvalues: if isinstance(lv, RefExpr) and lv.fullname and lv.is_new_def: @@ -637,7 +634,7 @@ def visit_del_stmt(self, o: DelStmt) -> None: # Expressions def process_global_ref_expr(self, o: RefExpr) -> None: - if o.fullname is not None: + if o.fullname: self.add_dependency(make_trigger(o.fullname)) # If this is a reference to a type, generate a dependency to its @@ -968,6 +965,9 @@ def visit_instance(self, typ: Instance) -> list[str]: triggers.extend(self.get_type_triggers(arg)) if typ.last_known_value: triggers.extend(self.get_type_triggers(typ.last_known_value)) + if typ.extra_attrs and typ.extra_attrs.mod_name: + # Module as type effectively depends on all module attributes, use wildcard. + triggers.append(make_wildcard_trigger(typ.extra_attrs.mod_name)) return triggers def visit_type_alias_type(self, typ: TypeAliasType) -> list[str]: @@ -1119,7 +1119,7 @@ def dump_all_dependencies( deps = get_dependencies(node, type_map, python_version, options) for trigger, targets in deps.items(): all_deps.setdefault(trigger, set()).update(targets) - TypeState.add_all_protocol_deps(all_deps) + type_state.add_all_protocol_deps(all_deps) for trigger, targets in sorted(all_deps.items(), key=lambda x: x[0]): print(trigger) diff --git a/mypy/server/objgraph.py b/mypy/server/objgraph.py index f15d503f0f16..89a086b8a0ab 100644 --- a/mypy/server/objgraph.py +++ b/mypy/server/objgraph.py @@ -64,11 +64,11 @@ def get_edges(o: object) -> Iterator[tuple[object, object]]: # in closures and self pointers to other objects if hasattr(e, "__closure__"): - yield (s, "__closure__"), e.__closure__ # type: ignore + yield (s, "__closure__"), e.__closure__ if hasattr(e, "__self__"): - se = e.__self__ # type: ignore + se = e.__self__ if se is not o and se is not type(o) and hasattr(s, "__self__"): - yield s.__self__, se # type: ignore + yield s.__self__, se else: if not type(e) in TYPE_BLACKLIST: yield s, e diff --git a/mypy/server/update.py b/mypy/server/update.py index ed059259c7a6..00b823c99dfd 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -115,6 +115,7 @@ from __future__ import annotations import os +import re import sys import time from typing import Callable, NamedTuple, Sequence, Union @@ -150,13 +151,17 @@ semantic_analysis_for_scc, semantic_analysis_for_targets, ) -from mypy.server.astdiff import SnapshotItem, compare_symbol_table_snapshots, snapshot_symbol_table +from mypy.server.astdiff import ( + SymbolSnapshot, + compare_symbol_table_snapshots, + snapshot_symbol_table, +) from mypy.server.astmerge import merge_asts from mypy.server.aststrip import SavedAttributes, strip_target from mypy.server.deps import get_dependencies_of_target, merge_dependencies from mypy.server.target import trigger_to_target from mypy.server.trigger import WILDCARD_TAG, make_trigger -from mypy.typestate import TypeState +from mypy.typestate import type_state from mypy.util import module_prefix, split_target MAX_ITER: Final = 1000 @@ -182,7 +187,7 @@ def __init__(self, result: 
BuildResult) -> None: # Merge in any root dependencies that may not have been loaded merge_dependencies(manager.load_fine_grained_deps(FAKE_ROOT_MODULE), self.deps) self.previous_targets_with_errors = manager.errors.targets() - self.previous_messages = result.errors[:] + self.previous_messages: list[str] = result.errors[:] # Module, if any, that had blocking errors in the last run as (id, path) tuple. self.blocking_error: tuple[str, str] | None = None # Module that we haven't processed yet but that are known to be stale. @@ -202,7 +207,10 @@ def __init__(self, result: BuildResult) -> None: self.processed_targets: list[str] = [] def update( - self, changed_modules: list[tuple[str, str]], removed_modules: list[tuple[str, str]] + self, + changed_modules: list[tuple[str, str]], + removed_modules: list[tuple[str, str]], + followed: bool = False, ) -> list[str]: """Update previous build result by processing changed modules. @@ -218,6 +226,7 @@ def update( Assume this is correct; it's not validated here. removed_modules: Modules that have been deleted since the previous update or removed from the build. + followed: If True, the modules were found through following imports Returns: A list of errors. @@ -255,7 +264,9 @@ def update( self.blocking_error = None while True: - result = self.update_one(changed_modules, initial_set, removed_set, blocking_error) + result = self.update_one( + changed_modules, initial_set, removed_set, blocking_error, followed + ) changed_modules, (next_id, next_path), blocker_messages = result if blocker_messages is not None: @@ -290,6 +301,7 @@ def update( messages = self.manager.errors.new_messages() break + messages = sort_messages_preserving_file_order(messages, self.previous_messages) self.previous_messages = messages[:] return messages @@ -327,6 +339,7 @@ def update_one( initial_set: set[str], removed_set: set[str], blocking_error: str | None, + followed: bool, ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Process a module from the list of changed modules. @@ -353,7 +366,7 @@ def update_one( ) return changed_modules, (next_id, next_path), None - result = self.update_module(next_id, next_path, next_id in removed_set) + result = self.update_module(next_id, next_path, next_id in removed_set, followed) remaining, (next_id, next_path), blocker_messages = result changed_modules = [(id, path) for id, path in changed_modules if id != next_id] changed_modules = dedupe_modules(remaining + changed_modules) @@ -366,7 +379,7 @@ def update_one( return changed_modules, (next_id, next_path), blocker_messages def update_module( - self, module: str, path: str, force_removed: bool + self, module: str, path: str, force_removed: bool, followed: bool ) -> tuple[list[tuple[str, str]], tuple[str, str], list[str] | None]: """Update a single modified module. @@ -378,6 +391,7 @@ def update_module( path: File system path of the module force_removed: If True, consider module removed from the build even if path exists (used for removing an existing file from the build) + followed: Was this found via import following? Returns: Tuple with these items: @@ -407,7 +421,7 @@ def update_module( t0 = time.time() # Record symbol table snapshot of old version the changed module. 
- old_snapshots: dict[str, dict[str, SnapshotItem]] = {} + old_snapshots: dict[str, dict[str, SymbolSnapshot]] = {} if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot @@ -415,7 +429,7 @@ def update_module( manager.errors.reset() self.processed_targets.append(module) result = update_module_isolated( - module, path, manager, previous_modules, graph, force_removed + module, path, manager, previous_modules, graph, force_removed, followed ) if isinstance(result, BlockedUpdate): # Blocking error -- just give up @@ -550,6 +564,7 @@ def update_module_isolated( previous_modules: dict[str, str], graph: Graph, force_removed: bool, + followed: bool, ) -> UpdateResult: """Build a new version of one changed module only. @@ -573,7 +588,7 @@ def update_module_isolated( delete_module(module, path, graph, manager) return NormalUpdate(module, path, [], None) - sources = get_sources(manager.fscache, previous_modules, [(module, path)]) + sources = get_sources(manager.fscache, previous_modules, [(module, path)], followed) if module in manager.missing_modules: manager.missing_modules.remove(module) @@ -651,6 +666,7 @@ def restore(ids: list[str]) -> None: state.type_checker().reset() state.type_check_first_pass() state.type_check_second_pass() + state.detect_possibly_undefined_vars() t2 = time.time() state.finish_passes() t3 = time.time() @@ -725,18 +741,21 @@ def get_module_to_path_map(graph: Graph) -> dict[str, str]: def get_sources( - fscache: FileSystemCache, modules: dict[str, str], changed_modules: list[tuple[str, str]] + fscache: FileSystemCache, + modules: dict[str, str], + changed_modules: list[tuple[str, str]], + followed: bool, ) -> list[BuildSource]: sources = [] for id, path in changed_modules: if fscache.isfile(path): - sources.append(BuildSource(path, id, None)) + sources.append(BuildSource(path, id, None, followed=followed)) return sources def calculate_active_triggers( manager: BuildManager, - old_snapshots: dict[str, dict[str, SnapshotItem]], + old_snapshots: dict[str, dict[str, SymbolSnapshot]], new_modules: dict[str, MypyFile | None], ) -> set[str]: """Determine activated triggers by comparing old and new symbol tables. @@ -854,7 +873,7 @@ def propagate_changes_using_dependencies( # We need to do this to avoid false negatives if the protocol itself is # unchanged, but was marked stale because its sub- (or super-) type changed. for info in stale_protos: - TypeState.reset_subtype_caches_for(info) + type_state.reset_subtype_caches_for(info) # Then fully reprocess all targets. # TODO: Preserve order (set is not optimal) for id, nodes in sorted(todo.items(), key=lambda x: x[0]): @@ -962,9 +981,10 @@ def key(node: FineGrainedDeferredNode) -> int: nodes = sorted(nodeset, key=key) - options = graph[module_id].options + state = graph[module_id] + options = state.options manager.errors.set_file_ignored_lines( - file_node.path, file_node.ignored_lines, options.ignore_errors + file_node.path, file_node.ignored_lines, options.ignore_errors or state.ignore_all ) targets = set() @@ -1065,7 +1085,7 @@ def update_deps( for trigger, targets in new_deps.items(): deps.setdefault(trigger, set()).update(targets) # Merge also the newly added protocol deps (if any). 
- TypeState.update_protocol_deps(deps) + type_state.update_protocol_deps(deps) def lookup_target( @@ -1258,3 +1278,61 @@ def refresh_suppressed_submodules( state.suppressed.append(submodule) state.suppressed_set.add(submodule) return messages + + +def extract_fnam_from_message(message: str) -> str | None: + m = re.match(r"([^:]+):[0-9]+: (error|note): ", message) + if m: + return m.group(1) + return None + + +def extract_possible_fnam_from_message(message: str) -> str: + # This may return non-path things if there is some random colon on the line + return message.split(":", 1)[0] + + +def sort_messages_preserving_file_order( + messages: list[str], prev_messages: list[str] +) -> list[str]: + """Sort messages so that the order of files is preserved. + + An update generates messages so that the files can be in a fairly + arbitrary order. Preserve the order of files to avoid messages + getting reshuffled continuously. If there are messages in + additional files, sort them towards the end. + """ + # Calculate file order from the previous messages + n = 0 + order = {} + for msg in prev_messages: + fnam = extract_fnam_from_message(msg) + if fnam and fnam not in order: + order[fnam] = n + n += 1 + + # Related messages must be sorted as a group of successive lines + groups = [] + i = 0 + while i < len(messages): + msg = messages[i] + maybe_fnam = extract_possible_fnam_from_message(msg) + group = [msg] + if maybe_fnam in order: + # This looks like a file name. Collect all lines related to this message. + while ( + i + 1 < len(messages) + and extract_possible_fnam_from_message(messages[i + 1]) not in order + and extract_fnam_from_message(messages[i + 1]) is None + and not messages[i + 1].startswith("mypy: ") + ): + i += 1 + group.append(messages[i]) + groups.append((order.get(maybe_fnam, n), group)) + i += 1 + + groups = sorted(groups, key=lambda g: g[0]) + result = [] + for key, group in groups: + result.extend(group) + return result diff --git a/mypy/solve.py b/mypy/solve.py index c9c7db1ae26c..b8304d29c1ce 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -18,7 +18,7 @@ UnionType, get_proper_type, ) -from mypy.typestate import TypeState +from mypy.typestate import type_state def solve_constraints( @@ -54,7 +54,7 @@ def solve_constraints( if bottom is None: bottom = c.target else: - if TypeState.infer_unions: + if type_state.infer_unions: # This deviates from the general mypy semantics because # recursive types are union-heavy in 95% of cases. 
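# --- Illustrative usage; assumes a mypy checkout that already contains this patch ---------
# The new helper keeps the per-file order of messages stable across fine-grained updates
# and sorts messages from previously unseen files towards the end:

from mypy.server.update import sort_messages_preserving_file_order

prev = ["b.py:1: error: old B", "a.py:2: error: old A"]
new = ["a.py:3: error: new A", "c.py:1: error: new C", "b.py:4: error: new B"]
print(sort_messages_preserving_file_order(new, prev))
# -> ['b.py:4: error: new B', 'a.py:3: error: new A', 'c.py:1: error: new C']
# --------------------------------------------------------------------------------------------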
bottom = UnionType.make_union([bottom, c.target]) diff --git a/mypy/stats.py b/mypy/stats.py index f68edd7b9c04..b3a32c1ce72c 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -3,7 +3,6 @@ from __future__ import annotations import os -import typing from collections import Counter from contextlib import contextmanager from typing import Iterator, cast @@ -102,7 +101,7 @@ def __init__( self.line_map: dict[int, int] = {} - self.type_of_any_counter: typing.Counter[int] = Counter() + self.type_of_any_counter: Counter[int] = Counter() self.any_line_map: dict[int, list[AnyType]] = {} # For each scope (top level/function), whether the scope was type checked @@ -150,7 +149,7 @@ def visit_func_def(self, o: FuncDef) -> None: if o in o.expanded: print( "{}:{}: ERROR: cycle in function expansion; skipping".format( - self.filename, o.get_line() + self.filename, o.line ) ) return diff --git a/mypy/strconv.py b/mypy/strconv.py index 1acf7699316c..b2e9da5dbf6a 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -50,7 +50,7 @@ def dump(self, nodes: Sequence[object], obj: mypy.nodes.Context) -> str: number. See mypy.util.dump_tagged for a description of the nodes argument. """ - tag = short_type(obj) + ":" + str(obj.get_line()) + tag = short_type(obj) + ":" + str(obj.line) if self.show_ids: assert self.id_mapper is not None tag += f"<{self.get_id(obj)}>" @@ -276,6 +276,8 @@ def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: a: list[Any] = [o.body] + if o.is_star: + a.append("*") for i in range(len(o.vars)): a.append(o.types[i]) @@ -365,7 +367,7 @@ def pretty_name( id = "" if isinstance(target_node, mypy.nodes.MypyFile) and name == fullname: n += id - elif kind == mypy.nodes.GDEF or (fullname != name and fullname is not None): + elif kind == mypy.nodes.GDEF or (fullname != name and fullname): # Append fully qualified name for global references. n += f" [{fullname}{id}]" elif kind == mypy.nodes.LDEF: @@ -411,6 +413,8 @@ def visit_call_expr(self, o: mypy.nodes.CallExpr) -> str: return self.dump(a + extra, o) def visit_op_expr(self, o: mypy.nodes.OpExpr) -> str: + if o.analyzed: + return o.analyzed.accept(self) return self.dump([o.op, o.left, o.right], o) def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> str: diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 243db68f7a80..6cb4669887fe 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -48,7 +48,7 @@ import sys import traceback from collections import defaultdict -from typing import Dict, Iterable, List, Mapping, Optional, cast +from typing import Iterable, List, Mapping, cast from typing_extensions import Final import mypy.build @@ -95,6 +95,7 @@ MemberExpr, MypyFile, NameExpr, + OpExpr, OverloadedFuncDef, Statement, StrExpr, @@ -104,7 +105,13 @@ ) from mypy.options import Options as MypyOptions from mypy.stubdoc import Sig, find_unique_signatures, parse_all_signatures -from mypy.stubgenc import generate_stub_for_c_module +from mypy.stubgenc import ( + DocstringSignatureGenerator, + ExternalSignatureGenerator, + FallbackSignatureGenerator, + SignatureGenerator, + generate_stub_for_c_module, +) from mypy.stubutil import ( CantImport, common_dir_prefix, @@ -127,6 +134,7 @@ TypeList, TypeStrVisitor, UnboundType, + UnionType, get_proper_type, ) from mypy.visitor import NodeVisitor @@ -183,6 +191,22 @@ "__iter__", } +# These magic methods always return the same type. 
+KNOWN_MAGIC_METHODS_RETURN_TYPES: Final = { + "__len__": "int", + "__length_hint__": "int", + "__init__": "None", + "__del__": "None", + "__bool__": "bool", + "__bytes__": "bytes", + "__format__": "str", + "__contains__": "bool", + "__complex__": "complex", + "__int__": "int", + "__float__": "float", + "__index__": "int", +} + class Options: """Represents stubgen options. @@ -303,6 +327,9 @@ def visit_none_type(self, t: NoneType) -> str: def visit_type_list(self, t: TypeList) -> str: return f"[{self.list_str(t.items)}]" + def visit_union_type(self, t: UnionType) -> str: + return " | ".join([item.accept(self) for item in t.items]) + def args_str(self, args: Iterable[Type]) -> str: """Convert an array of arguments to strings and join the results with commas. @@ -380,6 +407,9 @@ def visit_list_expr(self, node: ListExpr) -> str: def visit_ellipsis(self, node: EllipsisExpr) -> str: return "..." + def visit_op_expr(self, o: OpExpr) -> str: + return f"{o.left.accept(self)} {o.op} {o.right.accept(self)}" + class ImportTracker: """Record necessary imports during stub generation.""" @@ -643,7 +673,7 @@ def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: self.visit_func_def(item.func, is_abstract=is_abstract, is_overload=is_overload) if is_overload: overload_chain = True - elif overload_chain and is_overload: + elif is_overload: self.visit_func_def(item.func, is_abstract=is_abstract, is_overload=is_overload) else: # skip the overload implementation and clear the decorator we just processed @@ -725,12 +755,12 @@ def visit_func_def( retname = None # implicit Any else: retname = self.print_annotation(o.unanalyzed_type.ret_type) - elif isinstance(o, FuncDef) and ( - o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE - ): + elif o.abstract_status == IS_ABSTRACT or o.name in METHODS_WITH_RETURN_VALUE: # Always assume abstract methods return Any unless explicitly annotated. Also # some dunder methods should not have a None return type. 
retname = None # implicit Any + elif o.name in KNOWN_MAGIC_METHODS_RETURN_TYPES: + retname = KNOWN_MAGIC_METHODS_RETURN_TYPES[o.name] elif has_yield_expression(o): self.add_abc_import("Generator") yield_name = "None" @@ -973,8 +1003,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: self.process_namedtuple(lvalue, o.rvalue) continue if ( - self.is_top_level() - and isinstance(lvalue, NameExpr) + isinstance(lvalue, NameExpr) and not self.is_private_name(lvalue.name) and # it is never an alias with explicit annotation @@ -985,7 +1014,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: continue if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): items = lvalue.items - if isinstance(o.unanalyzed_type, TupleType): # type: ignore + if isinstance(o.unanalyzed_type, TupleType): # type: ignore[misc] annotations: Iterable[Type | None] = o.unanalyzed_type.items else: annotations = [None] * len(items) @@ -1092,7 +1121,7 @@ def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: def process_typealias(self, lvalue: NameExpr, rvalue: Expression) -> None: p = AliasPrinter(self) - self.add(f"{lvalue.name} = {rvalue.accept(p)}\n") + self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") self.record_name(lvalue.name) self._vars[-1].append(lvalue.name) @@ -1287,10 +1316,7 @@ def is_private_name(self, name: str, fullname: str | None = None) -> bool: def is_private_member(self, fullname: str) -> bool: parts = fullname.split(".") - for part in parts: - if self.is_private_name(part): - return True - return False + return any(self.is_private_name(part) for part in parts) def get_str_type_of_node( self, rvalue: Expression, can_infer_optional: bool = False, can_be_any: bool = True @@ -1628,6 +1654,18 @@ def generate_stub_from_ast( file.write("".join(gen.output())) +def get_sig_generators(options: Options) -> List[SignatureGenerator]: + sig_generators: List[SignatureGenerator] = [ + DocstringSignatureGenerator(), + FallbackSignatureGenerator(), + ] + if options.doc_dir: + # Collect info from docs (if given). Always check these first. + sigs, class_sigs = collect_docs_signatures(options.doc_dir) + sig_generators.insert(0, ExternalSignatureGenerator(sigs, class_sigs)) + return sig_generators + + def collect_docs_signatures(doc_dir: str) -> tuple[dict[str, str], dict[str, str]]: """Gather all function and class signatures in the docs. @@ -1650,12 +1688,7 @@ def generate_stubs(options: Options) -> None: """Main entry point for the program.""" mypy_opts = mypy_options(options) py_modules, c_modules = collect_build_targets(options, mypy_opts) - - # Collect info from docs (if given): - sigs = class_sigs = None # type: Optional[Dict[str, str]] - if options.doc_dir: - sigs, class_sigs = collect_docs_signatures(options.doc_dir) - + sig_generators = get_sig_generators(options) # Use parsed sources to generate stubs for Python modules. 
generate_asts_for_modules(py_modules, options.parse_only, mypy_opts, options.verbose) files = [] @@ -1682,7 +1715,7 @@ def generate_stubs(options: Options) -> None: target = os.path.join(options.output_dir, target) files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): - generate_stub_for_c_module(mod.module, target, sigs=sigs, class_sigs=class_sigs) + generate_stub_for_c_module(mod.module, target, sig_generators=sig_generators) num_modules = len(py_modules) + len(c_modules) if not options.quiet and num_modules > 0: print("Processed %d modules" % num_modules) @@ -1733,9 +1766,7 @@ def parse_options(args: list[str]) -> Options: parser.add_argument( "--export-less", action="store_true", - help=( - "don't implicitly export all names imported from other modules " "in the same package" - ), + help="don't implicitly export all names imported from other modules in the same package", ) parser.add_argument("-v", "--verbose", action="store_true", help="show more verbose messages") parser.add_argument("-q", "--quiet", action="store_true", help="show fewer messages") diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 6b3f9d47b34a..add33e66cee3 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -10,8 +10,9 @@ import inspect import os.path import re +from abc import abstractmethod from types import ModuleType -from typing import Any, Mapping +from typing import Any, Iterable, Mapping from typing_extensions import Final from mypy.moduleinspect import is_c_module @@ -40,16 +41,119 @@ ) +class SignatureGenerator: + """Abstract base class for extracting a list of FunctionSigs for each function.""" + + @abstractmethod + def get_function_sig( + self, func: object, module_name: str, name: str + ) -> list[FunctionSig] | None: + pass + + @abstractmethod + def get_method_sig( + self, func: object, module_name: str, class_name: str, name: str, self_var: str + ) -> list[FunctionSig] | None: + pass + + +class ExternalSignatureGenerator(SignatureGenerator): + def __init__( + self, func_sigs: dict[str, str] | None = None, class_sigs: dict[str, str] | None = None + ): + """ + Takes a mapping of function/method names to signatures and class name to + class signatures (usually corresponds to __init__). 
+ """ + self.func_sigs = func_sigs or {} + self.class_sigs = class_sigs or {} + + def get_function_sig( + self, func: object, module_name: str, name: str + ) -> list[FunctionSig] | None: + if name in self.func_sigs: + return [ + FunctionSig( + name=name, + args=infer_arg_sig_from_anon_docstring(self.func_sigs[name]), + ret_type="Any", + ) + ] + else: + return None + + def get_method_sig( + self, func: object, module_name: str, class_name: str, name: str, self_var: str + ) -> list[FunctionSig] | None: + if ( + name in ("__new__", "__init__") + and name not in self.func_sigs + and class_name in self.class_sigs + ): + return [ + FunctionSig( + name=name, + args=infer_arg_sig_from_anon_docstring(self.class_sigs[class_name]), + ret_type="None" if name == "__init__" else "Any", + ) + ] + return self.get_function_sig(func, module_name, name) + + +class DocstringSignatureGenerator(SignatureGenerator): + def get_function_sig( + self, func: object, module_name: str, name: str + ) -> list[FunctionSig] | None: + docstr = getattr(func, "__doc__", None) + inferred = infer_sig_from_docstring(docstr, name) + if inferred: + assert docstr is not None + if is_pybind11_overloaded_function_docstring(docstr, name): + # Remove pybind11 umbrella (*args, **kwargs) for overloaded functions + del inferred[-1] + return inferred + + def get_method_sig( + self, func: object, module_name: str, class_name: str, name: str, self_var: str + ) -> list[FunctionSig] | None: + return self.get_function_sig(func, module_name, name) + + +class FallbackSignatureGenerator(SignatureGenerator): + def get_function_sig( + self, func: object, module_name: str, name: str + ) -> list[FunctionSig] | None: + return [ + FunctionSig( + name=name, + args=infer_arg_sig_from_anon_docstring("(*args, **kwargs)"), + ret_type="Any", + ) + ] + + def get_method_sig( + self, func: object, module_name: str, class_name: str, name: str, self_var: str + ) -> list[FunctionSig] | None: + return [ + FunctionSig( + name=name, + args=infer_method_sig(name, self_var), + ret_type="None" if name == "__init__" else "Any", + ) + ] + + def generate_stub_for_c_module( - module_name: str, - target: str, - sigs: dict[str, str] | None = None, - class_sigs: dict[str, str] | None = None, + module_name: str, target: str, sig_generators: Iterable[SignatureGenerator] ) -> None: """Generate stub for C module. - This combines simple runtime introspection (looking for docstrings and attributes - with simple builtin types) and signatures inferred from .rst documentation (if given). + Signature generators are called in order until a list of signatures is returned. The order + is: + - signatures inferred from .rst documentation (if given) + - simple runtime introspection (looking for docstrings and attributes + with simple builtin types) + - fallback based special method names or "(*args, **kwargs)" If directory for target doesn't exist it will be created. Existing stub will be overwritten. 
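For illustration, a minimal sketch of how the signature-generator chain described in the docstring above is meant to be driven; it assumes only the stubgenc API introduced in this diff, and the module name and output path are placeholders rather than anything from the patch.

# Hypothetical usage sketch: run the default generator chain against a C extension
# module. "_example_ext" and "out/_example_ext.pyi" are placeholder names.
from mypy.stubgenc import (
    DocstringSignatureGenerator,
    FallbackSignatureGenerator,
    generate_stub_for_c_module,
)

sig_generators = [DocstringSignatureGenerator(), FallbackSignatureGenerator()]
generate_stub_for_c_module(
    "_example_ext", "out/_example_ext.pyi", sig_generators=sig_generators
)

Each generator is tried in order, so DocstringSignatureGenerator gets the first chance at a docstring-based signature, and FallbackSignatureGenerator guarantees that some signature is always produced.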
@@ -65,7 +169,9 @@ def generate_stub_for_c_module( items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): - generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs) + generate_c_function_stub( + module, name, obj, functions, imports=imports, sig_generators=sig_generators + ) done.add(name) types: list[str] = [] for name, obj in items: @@ -73,7 +179,7 @@ def generate_stub_for_c_module( continue if is_c_type(obj): generate_c_type_stub( - module, name, obj, types, imports=imports, sigs=sigs, class_sigs=class_sigs + module, name, obj, types, imports=imports, sig_generators=sig_generators ) done.add(name) variables = [] @@ -153,10 +259,9 @@ def generate_c_function_stub( obj: object, output: list[str], imports: list[str], + sig_generators: Iterable[SignatureGenerator], self_var: str | None = None, - sigs: dict[str, str] | None = None, class_name: str | None = None, - class_sigs: dict[str, str] | None = None, ) -> None: """Generate stub for a single function or method. @@ -165,60 +270,38 @@ def generate_c_function_stub( The 'class_name' is used to find signature of __init__ or __new__ in 'class_sigs'. """ - if sigs is None: - sigs = {} - if class_sigs is None: - class_sigs = {} - - ret_type = "None" if name == "__init__" and class_name else "Any" - - if ( - name in ("__new__", "__init__") - and name not in sigs - and class_name - and class_name in class_sigs - ): - inferred: list[FunctionSig] | None = [ - FunctionSig( - name=name, - args=infer_arg_sig_from_anon_docstring(class_sigs[class_name]), - ret_type=ret_type, - ) - ] + inferred: list[FunctionSig] | None = None + if class_name: + # method: + assert self_var is not None, "self_var should be provided for methods" + for sig_gen in sig_generators: + inferred = sig_gen.get_method_sig(obj, module.__name__, class_name, name, self_var) + if inferred: + # add self/cls var, if not present + for sig in inferred: + if not sig.args or sig.args[0].name != self_var: + sig.args.insert(0, ArgSig(name=self_var)) + break else: - docstr = getattr(obj, "__doc__", None) - inferred = infer_sig_from_docstring(docstr, name) - if inferred: - assert docstr is not None - if is_pybind11_overloaded_function_docstring(docstr, name): - # Remove pybind11 umbrella (*args, **kwargs) for overloaded functions - del inferred[-1] - if not inferred: - if class_name and name not in sigs: - inferred = [ - FunctionSig(name, args=infer_method_sig(name, self_var), ret_type=ret_type) - ] - else: - inferred = [ - FunctionSig( - name=name, - args=infer_arg_sig_from_anon_docstring( - sigs.get(name, "(*args, **kwargs)") - ), - ret_type=ret_type, - ) - ] - elif class_name and self_var: - args = inferred[0].args - if not args or args[0].name != self_var: - args.insert(0, ArgSig(name=self_var)) + # function: + for sig_gen in sig_generators: + inferred = sig_gen.get_function_sig(obj, module.__name__, name) + if inferred: + break + + if not inferred: + raise ValueError( + "No signature was found. This should never happen " + "if FallbackSignatureGenerator is provided" + ) + is_classmethod = self_var == "cls" is_overloaded = len(inferred) > 1 if inferred else False if is_overloaded: imports.append("from typing import overload") if inferred: for signature in inferred: - sig = [] + args: list[str] = [] for arg in signature.args: if arg.name == self_var: arg_def = self_var @@ -233,14 +316,16 @@ def generate_c_function_stub( if arg.default: arg_def += " = ..." 
- sig.append(arg_def) + args.append(arg_def) if is_overloaded: output.append("@overload") + if is_classmethod: + output.append("@classmethod") output.append( "def {function}({args}) -> {ret}: ...".format( function=name, - args=", ".join(sig), + args=", ".join(args), ret=strip_or_import(signature.ret_type, module, imports), ) ) @@ -338,8 +423,7 @@ def generate_c_type_stub( obj: type, output: list[str], imports: list[str], - sigs: dict[str, str] | None = None, - class_sigs: dict[str, str] | None = None, + sig_generators: Iterable[SignatureGenerator], ) -> None: """Generate stub for a single class using runtime introspection. @@ -369,7 +453,6 @@ def generate_c_type_stub( continue attr = "__init__" if is_c_classmethod(value): - methods.append("@classmethod") self_var = "cls" else: self_var = "self" @@ -380,9 +463,8 @@ def generate_c_type_stub( methods, imports=imports, self_var=self_var, - sigs=sigs, class_name=class_name, - class_sigs=class_sigs, + sig_generators=sig_generators, ) elif is_c_property(value): done.add(attr) @@ -398,7 +480,7 @@ def generate_c_type_stub( ) elif is_c_type(value): generate_c_type_stub( - module, attr, value, types, imports=imports, sigs=sigs, class_sigs=class_sigs + module, attr, value, types, imports=imports, sig_generators=sig_generators ) done.add(attr) diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index ef025e1caa0f..15bd96d9f4b4 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -5,6 +5,14 @@ def is_legacy_bundled_package(prefix: str) -> bool: return prefix in legacy_bundled_packages +def approved_stub_package_exists(prefix: str) -> bool: + return is_legacy_bundled_package(prefix) or prefix in non_bundled_packages + + +def stub_package_name(prefix: str) -> str: + return legacy_bundled_packages.get(prefix) or non_bundled_packages[prefix] + + # Stubs for these third-party packages used to be shipped with mypy. # # Map package name to PyPI stub distribution name. @@ -12,14 +20,11 @@ def is_legacy_bundled_package(prefix: str) -> bool: # Package name can have one or two components ('a' or 'a.b'). legacy_bundled_packages = { "aiofiles": "types-aiofiles", - "atomicwrites": "types-atomicwrites", - "attr": "types-attrs", "backports": "types-backports", "backports_abc": "types-backports_abc", "bleach": "types-bleach", "boto": "types-boto", "cachetools": "types-cachetools", - "chardet": "types-chardet", "click_spinner": "types-click-spinner", "contextvars": "types-contextvars", "croniter": "types-croniter", @@ -30,7 +35,6 @@ def is_legacy_bundled_package(prefix: str) -> bool: "decorator": "types-decorator", "deprecated": "types-Deprecated", "docutils": "types-docutils", - "emoji": "types-emoji", "first": "types-first", "geoip2": "types-geoip2", "gflags": "types-python-gflags", @@ -56,7 +60,6 @@ def is_legacy_bundled_package(prefix: str) -> bool: "six": "types-six", "slugify": "types-python-slugify", "tabulate": "types-tabulate", - "termcolor": "types-termcolor", "toml": "types-toml", "typed_ast": "types-typed-ast", "tzlocal": "types-tzlocal", @@ -64,3 +67,117 @@ def is_legacy_bundled_package(prefix: str) -> bool: "waitress": "types-waitress", "yaml": "types-PyYAML", } + +# Map package name to PyPI stub distribution name from typeshed. +# Stubs for these packages were never bundled with mypy. Don't +# include packages that have a release that includes PEP 561 type +# information. +# +# Package name can have one or two components ('a' or 'a.b'). +# +# Note that these packages are omitted for now: +# sqlalchemy: It's unclear which stub package to suggest. 
There's also +# a mypy plugin available. +# pika: typeshed's stubs are on PyPI as types-pika-ts. +# types-pika already exists on PyPI, and is more complete in many ways, +# but is a non-typeshed stubs package. +non_bundled_packages = { + "MySQLdb": "types-mysqlclient", + "PIL": "types-Pillow", + "PyInstaller": "types-pyinstaller", + "Xlib": "types-python-xlib", + "annoy": "types-annoy", + "appdirs": "types-appdirs", + "aws_xray_sdk": "types-aws-xray-sdk", + "babel": "types-babel", + "backports.ssl_match_hostname": "types-backports.ssl_match_hostname", + "braintree": "types-braintree", + "bs4": "types-beautifulsoup4", + "bugbear": "types-flake8-bugbear", + "caldav": "types-caldav", + "cffi": "types-cffi", + "chevron": "types-chevron", + "colorama": "types-colorama", + "commonmark": "types-commonmark", + "consolemenu": "types-console-menu", + "crontab": "types-python-crontab", + "d3dshot": "types-D3DShot", + "dj_database_url": "types-dj-database-url", + "dockerfile_parse": "types-dockerfile-parse", + "docopt": "types-docopt", + "editdistance": "types-editdistance", + "entrypoints": "types-entrypoints", + "farmhash": "types-pyfarmhash", + "flake8_2020": "types-flake8-2020", + "flake8_builtins": "types-flake8-builtins", + "flake8_docstrings": "types-flake8-docstrings", + "flake8_plugin_utils": "types-flake8-plugin-utils", + "flake8_rst_docstrings": "types-flake8-rst-docstrings", + "flake8_simplify": "types-flake8-simplify", + "flake8_typing_imports": "types-flake8-typing-imports", + "flask_cors": "types-Flask-Cors", + "flask_migrate": "types-Flask-Migrate", + "flask_sqlalchemy": "types-Flask-SQLAlchemy", + "fpdf": "types-fpdf2", + "gdb": "types-gdb", + "google.cloud": "types-google-cloud-ndb", + "hdbcli": "types-hdbcli", + "html5lib": "types-html5lib", + "httplib2": "types-httplib2", + "humanfriendly": "types-humanfriendly", + "invoke": "types-invoke", + "jack": "types-JACK-Client", + "jmespath": "types-jmespath", + "jose": "types-python-jose", + "jsonschema": "types-jsonschema", + "keyboard": "types-keyboard", + "ldap3": "types-ldap3", + "nmap": "types-python-nmap", + "oauthlib": "types-oauthlib", + "openpyxl": "types-openpyxl", + "opentracing": "types-opentracing", + "paho.mqtt": "types-paho-mqtt", + "parsimonious": "types-parsimonious", + "passlib": "types-passlib", + "passpy": "types-passpy", + "peewee": "types-peewee", + "pep8ext_naming": "types-pep8-naming", + "playsound": "types-playsound", + "psutil": "types-psutil", + "psycopg2": "types-psycopg2", + "pyaudio": "types-pyaudio", + "pyautogui": "types-PyAutoGUI", + "pycocotools": "types-pycocotools", + "pyflakes": "types-pyflakes", + "pygments": "types-Pygments", + "pyi_splash": "types-pyinstaller", + "pynput": "types-pynput", + "pythoncom": "types-pywin32", + "pythonwin": "types-pywin32", + "pyscreeze": "types-PyScreeze", + "pysftp": "types-pysftp", + "pytest_lazyfixture": "types-pytest-lazy-fixture", + "pywintypes": "types-pywin32", + "regex": "types-regex", + "send2trash": "types-Send2Trash", + "slumber": "types-slumber", + "stdlib_list": "types-stdlib-list", + "stripe": "types-stripe", + "toposort": "types-toposort", + "tqdm": "types-tqdm", + "tree_sitter": "types-tree-sitter", + "tree_sitter_languages": "types-tree-sitter-languages", + "ttkthemes": "types-ttkthemes", + "urllib3": "types-urllib3", + "vobject": "types-vobject", + "whatthepatch": "types-whatthepatch", + "win32": "types-pywin32", + "win32api": "types-pywin32", + "win32con": "types-pywin32", + "win32com": "types-pywin32", + "win32comext": "types-pywin32", + "win32gui": 
"types-pywin32", + "xmltodict": "types-xmltodict", + "xxhash": "types-xxhash", + "zxcvbn": "types-zxcvbn", +} diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 378f61471437..cd173f63e2a1 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -15,6 +15,7 @@ import os import pkgutil import re +import symtable import sys import traceback import types @@ -29,11 +30,13 @@ import mypy.build import mypy.modulefinder +import mypy.nodes import mypy.state import mypy.types import mypy.version from mypy import nodes from mypy.config_parser import parse_config_file +from mypy.evalexpr import UNKNOWN, evaluate_expression from mypy.options import Options from mypy.util import FancyFormatter, bytes_to_human_readable_repr, is_dunder, plural_s @@ -126,17 +129,17 @@ def get_description(self, concise: bool = False) -> str: stub_file = stub_node.path or None stub_loc_str = "" - if stub_line: - stub_loc_str += f" at line {stub_line}" if stub_file: stub_loc_str += f" in file {Path(stub_file)}" + if stub_line: + stub_loc_str += f"{':' if stub_file else ' at line '}{stub_line}" runtime_line = None runtime_file = None if not isinstance(self.runtime_object, Missing): try: runtime_line = inspect.getsourcelines(self.runtime_object)[1] - except (OSError, TypeError): + except (OSError, TypeError, SyntaxError): pass try: runtime_file = inspect.getsourcefile(self.runtime_object) @@ -144,10 +147,10 @@ def get_description(self, concise: bool = False) -> str: pass runtime_loc_str = "" - if runtime_line: - runtime_loc_str += f" at line {runtime_line}" if runtime_file: runtime_loc_str += f" in file {Path(runtime_file)}" + if runtime_line: + runtime_loc_str += f"{':' if runtime_file else ' at line '}{runtime_line}" output = [ _style("error: ", color="red", bold=True), @@ -203,7 +206,9 @@ def test_module(module_name: str) -> Iterator[Error]: try: runtime = silent_import_module(module_name) - except Exception as e: + except KeyboardInterrupt: + raise + except BaseException as e: yield Error([module_name], f"failed to import, {type(e).__name__}: {e}", stub, MISSING) return @@ -259,10 +264,10 @@ def _verify_exported_names( if not (names_in_runtime_not_stub or names_in_stub_not_runtime): return yield Error( - object_path, + object_path + ["__all__"], ( "names exported from the stub do not correspond to the names exported at runtime. " - "This is probably due to an inaccurate `__all__` in the stub or things being missing from the stub." + "This is probably due to things being missing from the stub or an inaccurate `__all__` in the stub" ), # Pass in MISSING instead of the stub and runtime objects, as the line numbers aren't very # relevant here, and it makes for a prettier error message @@ -279,6 +284,36 @@ def _verify_exported_names( ) +def _get_imported_symbol_names(runtime: types.ModuleType) -> frozenset[str] | None: + """Retrieve the names in the global namespace which are known to be imported. + + 1). Use inspect to retrieve the source code of the module + 2). Use symtable to parse the source and retrieve names that are known to be imported + from other modules. + + If either of the above steps fails, return `None`. + + Note that if a set of names is returned, + it won't include names imported via `from foo import *` imports. 
+ """ + try: + source = inspect.getsource(runtime) + except (OSError, TypeError, SyntaxError): + return None + + if not source.strip(): + # The source code for the module was an empty file, + # no point in parsing it with symtable + return frozenset() + + try: + module_symtable = symtable.symtable(source, runtime.__name__, "exec") + except SyntaxError: + return None + + return frozenset(sym.get_name() for sym in module_symtable.get_symbols() if sym.is_imported()) + + @verify.register(nodes.MypyFile) def verify_mypyfile( stub: nodes.MypyFile, runtime: MaybeMissing[types.ModuleType], object_path: list[str] @@ -308,15 +343,26 @@ def verify_mypyfile( if not o.module_hidden and (not is_probably_private(m) or hasattr(runtime, m)) } + imported_symbols = _get_imported_symbol_names(runtime) + def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: + """Heuristics to determine whether a name originates from another module.""" obj = getattr(r, attr) - try: - obj_mod = getattr(obj, "__module__", None) - except Exception: + if isinstance(obj, types.ModuleType): return False - if obj_mod is not None: - return obj_mod == r.__name__ - return not isinstance(obj, types.ModuleType) + if callable(obj): + # It's highly likely to be a class or a function if it's callable, + # so the __module__ attribute will give a good indication of which module it comes from + try: + obj_mod = obj.__module__ + except Exception: + pass + else: + if isinstance(obj_mod, str): + return bool(obj_mod == r.__name__) + if imported_symbols is not None: + return attr not in imported_symbols + return True runtime_public_contents = ( runtime_all_as_set @@ -325,8 +371,9 @@ def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: m for m in dir(runtime) if not is_probably_private(m) - # Ensure that the object's module is `runtime`, since in the absence of __all__ we - # don't have a good way to detect re-exports at runtime. + # Filter out objects that originate from other modules (best effort). Note that in the + # absence of __all__, we don't have a way to detect explicit / intentional re-exports + # at runtime and _belongs_to_runtime(runtime, m) } ) @@ -349,20 +396,12 @@ def _belongs_to_runtime(r: types.ModuleType, attr: str) -> bool: yield from verify(stub_entry, runtime_entry, object_path + [entry]) -@verify.register(nodes.TypeInfo) -def verify_typeinfo( - stub: nodes.TypeInfo, runtime: MaybeMissing[type[Any]], object_path: list[str] +def _verify_final( + stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str] ) -> Iterator[Error]: - if isinstance(runtime, Missing): - yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=repr(stub)) - return - if not isinstance(runtime, type): - yield Error(object_path, "is not a type", stub, runtime, stub_desc=repr(stub)) - return - try: - class SubClass(runtime): # type: ignore + class SubClass(runtime): # type: ignore[misc] pass except TypeError: @@ -380,6 +419,59 @@ class SubClass(runtime): # type: ignore # Examples: ctypes.Array, ctypes._SimpleCData pass + +def _verify_metaclass( + stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str] +) -> Iterator[Error]: + # We exclude protocols, because of how complex their implementation is in different versions of + # python. Enums are also hard, ignoring. + # TODO: check that metaclasses are identical? 
+ if not stub.is_protocol and not stub.is_enum: + runtime_metaclass = type(runtime) + if runtime_metaclass is not type and stub.metaclass_type is None: + # This means that runtime has a custom metaclass, but a stub does not. + yield Error( + object_path, + "is inconsistent, metaclass differs", + stub, + runtime, + stub_desc="N/A", + runtime_desc=f"{runtime_metaclass}", + ) + elif ( + runtime_metaclass is type + and stub.metaclass_type is not None + # We ignore extra `ABCMeta` metaclass on stubs, this might be typing hack. + # We also ignore `builtins.type` metaclass as an implementation detail in mypy. + and not mypy.types.is_named_instance( + stub.metaclass_type, ("abc.ABCMeta", "builtins.type") + ) + ): + # This means that our stub has a metaclass that is not present at runtime. + yield Error( + object_path, + "metaclass mismatch", + stub, + runtime, + stub_desc=f"{stub.metaclass_type.type.fullname}", + runtime_desc="N/A", + ) + + +@verify.register(nodes.TypeInfo) +def verify_typeinfo( + stub: nodes.TypeInfo, runtime: MaybeMissing[type[Any]], object_path: list[str] +) -> Iterator[Error]: + if isinstance(runtime, Missing): + yield Error(object_path, "is not present at runtime", stub, runtime, stub_desc=repr(stub)) + return + if not isinstance(runtime, type): + yield Error(object_path, "is not a type", stub, runtime, stub_desc=repr(stub)) + return + + yield from _verify_final(stub, runtime, object_path) + yield from _verify_metaclass(stub, runtime, object_path) + # Check everything already defined on the stub class itself (i.e. not inherited) to_check = set(stub.names) # Check all public things on the runtime class @@ -401,7 +493,7 @@ class SubClass(runtime): # type: ignore for entry in sorted(to_check): mangled_entry = entry if entry.startswith("__") and not entry.endswith("__"): - mangled_entry = f"_{stub.name}{entry}" + mangled_entry = f"_{stub.name.lstrip('_')}{entry}" stub_to_verify = next((t.names[entry].node for t in stub.mro if entry in t.names), MISSING) assert stub_to_verify is not None try: @@ -528,6 +620,23 @@ def _verify_arg_default_value( f"has a default value of type {runtime_type}, " f"which is incompatible with stub argument type {stub_type}" ) + if stub_arg.initializer is not None: + stub_default = evaluate_expression(stub_arg.initializer) + if ( + stub_default is not UNKNOWN + and stub_default is not ... + and ( + stub_default != runtime_arg.default + # We want the types to match exactly, e.g. in case the stub has + # True and the runtime has 1 (or vice versa). 
+ or type(stub_default) is not type(runtime_arg.default) # noqa: E721 + ) + ): + yield ( + f'runtime argument "{runtime_arg.name}" ' + f"has a default value of {runtime_arg.default!r}, " + f"which is different from stub argument default {stub_default!r}" + ) else: if stub_arg.kind.is_optional(): yield ( @@ -569,7 +678,7 @@ def get_type(arg: Any) -> str | None: def has_default(arg: Any) -> bool: if isinstance(arg, inspect.Parameter): - return arg.default != inspect.Parameter.empty + return bool(arg.default != inspect.Parameter.empty) if isinstance(arg, nodes.Argument): return arg.kind.is_optional() raise AssertionError @@ -825,16 +934,8 @@ def verify_funcitem( return if isinstance(stub, nodes.FuncDef): - stub_abstract = stub.abstract_status == nodes.IS_ABSTRACT - runtime_abstract = getattr(runtime, "__isabstractmethod__", False) - # The opposite can exist: some implementations omit `@abstractmethod` decorators - if runtime_abstract and not stub_abstract: - yield Error( - object_path, - "is inconsistent, runtime method is abstract but stub is not", - stub, - runtime, - ) + for error_text in _verify_abstract_status(stub, runtime): + yield Error(object_path, error_text, stub, runtime) for message in _verify_static_class_methods(stub, runtime, object_path): yield Error(object_path, "is inconsistent, " + message, stub, runtime) @@ -1021,6 +1122,15 @@ def _verify_readonly_property(stub: nodes.Decorator, runtime: Any) -> Iterator[s yield "is inconsistent, cannot reconcile @property on stub with runtime object" +def _verify_abstract_status(stub: nodes.FuncDef, runtime: Any) -> Iterator[str]: + stub_abstract = stub.abstract_status == nodes.IS_ABSTRACT + runtime_abstract = getattr(runtime, "__isabstractmethod__", False) + # The opposite can exist: some implementations omit `@abstractmethod` decorators + if runtime_abstract and not stub_abstract: + item_type = "property" if stub.is_property else "method" + yield f"is inconsistent, runtime {item_type} is abstract but stub is not" + + def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> nodes.FuncItem | None: """Returns a FuncItem that corresponds to the output of the decorator. @@ -1037,7 +1147,7 @@ def apply_decorator_to_funcitem( ) -> nodes.FuncItem | None: if not isinstance(decorator, nodes.RefExpr): return None - if decorator.fullname is None: + if not decorator.fullname: # Happens with namedtuple return None if ( @@ -1079,6 +1189,8 @@ def verify_decorator( if stub.func.is_property: for message in _verify_readonly_property(stub, runtime): yield Error(object_path, message, stub, runtime) + for message in _verify_abstract_status(stub.func, runtime): + yield Error(object_path, message, stub, runtime) return func = _resolve_funcitem_from_decorator(stub) @@ -1176,6 +1288,8 @@ def verify_typealias( "__annotations__", "__path__", # mypy adds __path__ to packages, but C packages don't have it "__getattr__", # resulting behaviour might be typed explicitly + # Created by `warnings.warn`, does not make much sense to have in stubs: + "__warningregistry__", # TODO: remove the following from this list "__author__", "__version__", @@ -1270,16 +1384,13 @@ def is_subtype_helper(left: mypy.types.Type, right: mypy.types.Type) -> bool: isinstance(left, mypy.types.LiteralType) and isinstance(left.value, int) and left.value in (0, 1) - and isinstance(right, mypy.types.Instance) - and right.type.fullname == "builtins.bool" + and mypy.types.is_named_instance(right, "builtins.bool") ): # Pretend Literal[0, 1] is a subtype of bool to avoid unhelpful errors. 
return True - if ( - isinstance(right, mypy.types.TypedDictType) - and isinstance(left, mypy.types.Instance) - and left.type.fullname == "builtins.dict" + if isinstance(right, mypy.types.TypedDictType) and mypy.types.is_named_instance( + left, "builtins.dict" ): # Special case checks against TypedDicts return True @@ -1434,7 +1545,9 @@ def build_stubs(modules: list[str], options: Options, find_submodules: bool = Fa for m in pkgutil.walk_packages(runtime.__path__, runtime.__name__ + ".") if m.name not in all_modules ) - except Exception: + except KeyboardInterrupt: + raise + except BaseException: pass if sources: @@ -1463,9 +1576,6 @@ def get_typeshed_stdlib_modules( stdlib_py_versions = mypy.modulefinder.load_stdlib_py_versions(custom_typeshed_dir) if version_info is None: version_info = sys.version_info[0:2] - # Typeshed's minimum supported Python 3 is Python 3.7 - if sys.version_info < (3, 7): - version_info = (3, 7) def exists_in_version(module: str) -> bool: assert version_info is not None @@ -1555,6 +1665,8 @@ def test_stubs(args: _Arguments, use_builtins_fixtures: bool = False) -> int: options = Options() options.incremental = False options.custom_typeshed_dir = args.custom_typeshed_dir + if options.custom_typeshed_dir: + options.abs_custom_typeshed_dir = os.path.abspath(args.custom_typeshed_dir) options.config_file = args.mypy_config_file options.use_builtins_fixtures = use_builtins_fixtures @@ -1687,7 +1799,7 @@ def parse_options(args: list[str]) -> _Arguments: parser.add_argument( "--mypy-config-file", metavar="FILE", - help=("Use specified mypy config file to determine mypy plugins " "and mypy path"), + help=("Use specified mypy config file to determine mypy plugins and mypy path"), ) parser.add_argument( "--custom-typeshed-dir", metavar="DIR", help="Use the custom typeshed in DIR" diff --git a/mypy/subtypes.py b/mypy/subtypes.py index 9e84e25695dd..c3d5517d43dd 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -8,7 +8,7 @@ import mypy.constraints import mypy.typeops from mypy.erasetype import erase_type -from mypy.expandtype import expand_type_by_instance +from mypy.expandtype import expand_self_type, expand_type_by_instance from mypy.maptype import map_instance_to_supertype # Circular import; done in the function instead. 
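As a quick illustration of the stubtest options touched above, a hedged sketch of invoking stubtest programmatically; only parse_options and test_stubs from the code above are assumed, and the package name, config file, and typeshed path are placeholders.

# Hypothetical driver sketch: roughly equivalent to running stubtest from the
# command line with the flags shown above. All names and paths are placeholders.
from mypy.stubtest import parse_options, test_stubs

args = parse_options(
    ["example_pkg", "--mypy-config-file", "mypy.ini", "--custom-typeshed-dir", "./typeshed"]
)
raise SystemExit(test_stubs(args))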
@@ -27,6 +27,7 @@ from mypy.options import Options from mypy.state import state from mypy.types import ( + MYPYC_NATIVE_INT_NAMES, TUPLE_LIKE_INSTANCE_NAMES, TYPED_NAMEDTUPLE_NAMES, AnyType, @@ -38,6 +39,7 @@ Instance, LiteralType, NoneType, + NormalizedCallableType, Overloaded, Parameters, ParamSpecType, @@ -56,18 +58,21 @@ UninhabitedType, UnionType, UnpackType, + _flattened, get_proper_type, is_named_instance, ) -from mypy.typestate import SubtypeKind, TypeState -from mypy.typevartuples import extract_unpack, split_with_instance +from mypy.typestate import SubtypeKind, type_state +from mypy.typevars import fill_typevars_with_any +from mypy.typevartuples import extract_unpack, fully_split_with_mapped_and_template # Flags for detected protocol members IS_SETTABLE: Final = 1 IS_CLASSVAR: Final = 2 IS_CLASS_OR_STATIC: Final = 3 +IS_VAR: Final = 4 -TypeParameterChecker: _TypeAlias = Callable[[Type, Type, int, bool], bool] +TypeParameterChecker: _TypeAlias = Callable[[Type, Type, int, bool, "SubtypeContext"], bool] class SubtypeContext: @@ -80,6 +85,7 @@ def __init__( ignore_declared_variance: bool = False, # Supported for both proper and non-proper ignore_promotions: bool = False, + ignore_uninhabited: bool = False, # Proper subtype flags erase_instances: bool = False, keep_erased_types: bool = False, @@ -89,6 +95,7 @@ def __init__( self.ignore_pos_arg_names = ignore_pos_arg_names self.ignore_declared_variance = ignore_declared_variance self.ignore_promotions = ignore_promotions + self.ignore_uninhabited = ignore_uninhabited self.erase_instances = erase_instances self.keep_erased_types = keep_erased_types self.options = options @@ -97,11 +104,7 @@ def check_context(self, proper_subtype: bool) -> None: # Historically proper and non-proper subtypes were defined using different helpers # and different visitors. Check if flag values are such that we definitely support. if proper_subtype: - assert ( - not self.ignore_type_params - and not self.ignore_pos_arg_names - and not self.ignore_declared_variance - ) + assert not self.ignore_pos_arg_names and not self.ignore_declared_variance else: assert not self.erase_instances and not self.keep_erased_types @@ -115,6 +118,7 @@ def is_subtype( ignore_pos_arg_names: bool = False, ignore_declared_variance: bool = False, ignore_promotions: bool = False, + ignore_uninhabited: bool = False, options: Options | None = None, ) -> bool: """Is 'left' subtype of 'right'? @@ -134,6 +138,7 @@ def is_subtype( ignore_pos_arg_names=ignore_pos_arg_names, ignore_declared_variance=ignore_declared_variance, ignore_promotions=ignore_promotions, + ignore_uninhabited=ignore_uninhabited, options=options, ) else: @@ -143,10 +148,11 @@ def is_subtype( ignore_pos_arg_names, ignore_declared_variance, ignore_promotions, + ignore_uninhabited, options, } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_subtype(left, right): + if type_state.is_assumed_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # This case requires special care because it may cause infinite recursion. @@ -166,7 +172,7 @@ def is_subtype( # B = Union[int, Tuple[B, ...]] # When checking if A <: B we push pair (A, B) onto 'assuming' stack, then when after few # steps we come back to initial call is_subtype(A, B) and immediately return True. 
- with pop_on_exit(TypeState.get_assumptions(is_proper=False), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=False), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=False) return _is_subtype(left, right, subtype_context, proper_subtype=False) @@ -177,6 +183,7 @@ def is_proper_subtype( *, subtype_context: SubtypeContext | None = None, ignore_promotions: bool = False, + ignore_uninhabited: bool = False, erase_instances: bool = False, keep_erased_types: bool = False, ) -> bool: @@ -192,18 +199,25 @@ def is_proper_subtype( if subtype_context is None: subtype_context = SubtypeContext( ignore_promotions=ignore_promotions, + ignore_uninhabited=ignore_uninhabited, erase_instances=erase_instances, keep_erased_types=keep_erased_types, ) else: assert not any( - {ignore_promotions, erase_instances, keep_erased_types} + { + ignore_promotions, + ignore_uninhabited, + erase_instances, + keep_erased_types, + ignore_uninhabited, + } ), "Don't pass both context and individual flags" - if TypeState.is_assumed_proper_subtype(left, right): + if type_state.is_assumed_proper_subtype(left, right): return True if mypy.typeops.is_recursive_pair(left, right): # Same as for non-proper subtype, see detailed comment there for explanation. - with pop_on_exit(TypeState.get_assumptions(is_proper=True), left, right): + with pop_on_exit(type_state.get_assumptions(is_proper=True), left, right): return _is_subtype(left, right, subtype_context, proper_subtype=True) return _is_subtype(left, right, subtype_context, proper_subtype=True) @@ -215,6 +229,7 @@ def is_equivalent( ignore_type_params: bool = False, ignore_pos_arg_names: bool = False, options: Options | None = None, + subtype_context: SubtypeContext | None = None, ) -> bool: return is_subtype( a, @@ -222,16 +237,20 @@ def is_equivalent( ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, options=options, + subtype_context=subtype_context, ) and is_subtype( b, a, ignore_type_params=ignore_type_params, ignore_pos_arg_names=ignore_pos_arg_names, options=options, + subtype_context=subtype_context, ) -def is_same_type(a: Type, b: Type, ignore_promotions: bool = True) -> bool: +def is_same_type( + a: Type, b: Type, ignore_promotions: bool = True, subtype_context: SubtypeContext | None = None +) -> bool: """Are these types proper subtypes of each other? This means types may have different representation (e.g. an alias, or @@ -241,8 +260,10 @@ def is_same_type(a: Type, b: Type, ignore_promotions: bool = True) -> bool: # considered not the same type (which is the case at runtime). # Also Union[bool, int] (if it wasn't simplified before) will be different # from plain int, etc. - return is_proper_subtype(a, b, ignore_promotions=ignore_promotions) and is_proper_subtype( - b, a, ignore_promotions=ignore_promotions + return is_proper_subtype( + a, b, ignore_promotions=ignore_promotions, subtype_context=subtype_context + ) and is_proper_subtype( + b, a, ignore_promotions=ignore_promotions, subtype_context=subtype_context ) @@ -266,18 +287,20 @@ def _is_subtype( # ErasedType as we do for non-proper subtyping. 
return True - def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool: - if proper_subtype: - return is_proper_subtype(left, right, subtype_context=subtype_context) - return is_subtype(left, right, subtype_context=subtype_context) - if isinstance(right, UnionType) and not isinstance(left, UnionType): # Normally, when 'left' is not itself a union, the only way # 'left' can be a subtype of the union 'right' is if it is a # subtype of one of the items making up the union. - is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # Recombine rhs literal types, to make an enum type a subtype # of a union of all enum items as literal types. Only do it if # the previous check didn't succeed, since recombining can be @@ -289,9 +312,16 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool and (left.type.is_enum or left.type.fullname == "builtins.bool") ): right = UnionType(mypy.typeops.try_contracting_literals_in_union(right.items)) - is_subtype_of_item = any( - check_item(orig_left, item, subtype_context) for item in right.items - ) + if proper_subtype: + is_subtype_of_item = any( + is_proper_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) + else: + is_subtype_of_item = any( + is_subtype(orig_left, item, subtype_context=subtype_context) + for item in right.items + ) # However, if 'left' is a type variable T, T might also have # an upper bound which is itself a union. This case will be # handled below by the SubtypeVisitor. We have to check both @@ -306,24 +336,29 @@ def check_item(left: Type, right: Type, subtype_context: SubtypeContext) -> bool return left.accept(SubtypeVisitor(orig_right, subtype_context, proper_subtype)) -# TODO: should we pass on the original flags here and in couple other places? -# This seems logical but was never done in the past for some reasons. -def check_type_parameter(lefta: Type, righta: Type, variance: int, proper_subtype: bool) -> bool: - def check(left: Type, right: Type) -> bool: - return is_proper_subtype(left, right) if proper_subtype else is_subtype(left, right) - +def check_type_parameter( + left: Type, right: Type, variance: int, proper_subtype: bool, subtype_context: SubtypeContext +) -> bool: if variance == COVARIANT: - return check(lefta, righta) + if proper_subtype: + return is_proper_subtype(left, right, subtype_context=subtype_context) + else: + return is_subtype(left, right, subtype_context=subtype_context) elif variance == CONTRAVARIANT: - return check(righta, lefta) + if proper_subtype: + return is_proper_subtype(right, left, subtype_context=subtype_context) + else: + return is_subtype(right, left, subtype_context=subtype_context) else: if proper_subtype: - return is_same_type(lefta, righta) - return is_equivalent(lefta, righta) - - -def ignore_type_parameter(lefta: Type, righta: Type, variance: int, proper_subtype: bool) -> bool: - return True + # We pass ignore_promotions=False because it is a default for subtype checks. + # The actual value will be taken from the subtype_context, and it is whatever + # the original caller passed. 
+            return is_same_type(
+                left, right, ignore_promotions=False, subtype_context=subtype_context
+            )
+        else:
+            return is_equivalent(left, right, subtype_context=subtype_context)


 class SubtypeVisitor(TypeVisitor[bool]):
@@ -332,9 +367,6 @@ def __init__(self, right: Type, subtype_context: SubtypeContext, proper_subtype:
         self.orig_right = right
         self.proper_subtype = proper_subtype
         self.subtype_context = subtype_context
-        self.check_type_parameter = (
-            ignore_type_parameter if subtype_context.ignore_type_params else check_type_parameter
-        )
         self.options = subtype_context.options
         self._subtype_kind = SubtypeVisitor.build_subtype_kind(subtype_context, proper_subtype)
@@ -385,45 +417,45 @@ def visit_none_type(self, left: NoneType) -> bool:
         return True

     def visit_uninhabited_type(self, left: UninhabitedType) -> bool:
-        return True
+        # We ignore this for unsafe overload checks, so that an empty list and
+        # a list of int will be considered non-overlapping.
+        if isinstance(self.right, UninhabitedType):
+            return True
+        return not self.subtype_context.ignore_uninhabited

     def visit_erased_type(self, left: ErasedType) -> bool:
         # This may be encountered during type inference. The result probably doesn't
         # matter much.
         # TODO: it actually does matter, figure out more principled logic about this.
-        if self.subtype_context.keep_erased_types:
-            return False
-        return True
+        return not self.subtype_context.keep_erased_types

     def visit_deleted_type(self, left: DeletedType) -> bool:
         return True

     def visit_instance(self, left: Instance) -> bool:
         if left.type.fallback_to_any and not self.proper_subtype:
-            if isinstance(self.right, NoneType):
-                # NOTE: `None` is a *non-subclassable* singleton, therefore no class
-                # can be a subtype of it, even with an `Any` fallback.
-                # This special case is needed to treat descriptors in classes with
-                # dynamic base classes correctly, see #5456.
-                return False
-            return True
+            # NOTE: `None` is a *non-subclassable* singleton, therefore no class
+            # can be a subtype of it, even with an `Any` fallback.
+            # This special case is needed to treat descriptors in classes with
+            # dynamic base classes correctly, see #5456.
+            return not isinstance(self.right, NoneType)
         right = self.right
         if isinstance(right, TupleType) and mypy.typeops.tuple_fallback(right).type.is_enum:
             return self._is_subtype(left, mypy.typeops.tuple_fallback(right))
         if isinstance(right, Instance):
-            if TypeState.is_cached_subtype_check(self._subtype_kind, left, right):
+            if type_state.is_cached_subtype_check(self._subtype_kind, left, right):
                 return True
             if not self.subtype_context.ignore_promotions:
                 for base in left.type.mro:
                     if base._promote and any(
                         self._is_subtype(p, self.right) for p in base._promote
                     ):
-                        TypeState.record_subtype_cache_entry(self._subtype_kind, left, right)
+                        type_state.record_subtype_cache_entry(self._subtype_kind, left, right)
                         return True
             # Special case: Low-level integer types are compatible with 'int'. We can't
             # use promotions, since 'int' is already promoted to low-level integer types,
             # and we can't have circular promotions.
- if left.type.alt_promote is right.type: + if left.type.alt_promote and left.type.alt_promote.type is right.type: return True rname = right.type.fullname # Always try a nominal check if possible, @@ -447,8 +479,33 @@ def visit_instance(self, left: Instance) -> bool: t = erased nominal = True if right.type.has_type_var_tuple_type: - left_prefix, left_middle, left_suffix = split_with_instance(left) - right_prefix, right_middle, right_suffix = split_with_instance(right) + assert left.type.type_var_tuple_prefix is not None + assert left.type.type_var_tuple_suffix is not None + assert right.type.type_var_tuple_prefix is not None + assert right.type.type_var_tuple_suffix is not None + split_result = fully_split_with_mapped_and_template( + left.args, + left.type.type_var_tuple_prefix, + left.type.type_var_tuple_suffix, + right.args, + right.type.type_var_tuple_prefix, + right.type.type_var_tuple_suffix, + ) + if split_result is None: + return False + + ( + left_prefix, + left_mprefix, + left_middle, + left_msuffix, + left_suffix, + right_prefix, + right_mprefix, + right_middle, + right_msuffix, + right_suffix, + ) = split_result left_unpacked = extract_unpack(left_middle) right_unpacked = extract_unpack(right_middle) @@ -457,6 +514,11 @@ def visit_instance(self, left: Instance) -> bool: def check_mixed( unpacked_type: ProperType, compare_to: tuple[Type, ...] ) -> bool: + if ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + return all(is_equivalent(l, unpacked_type.args[0]) for l in compare_to) if isinstance(unpacked_type, TypeVarTupleType): return False if isinstance(unpacked_type, AnyType): @@ -483,13 +545,6 @@ def check_mixed( if not check_mixed(left_unpacked, right_middle): return False elif left_unpacked is None and right_unpacked is not None: - if ( - isinstance(right_unpacked, Instance) - and right_unpacked.type.fullname == "builtins.tuple" - ): - return all( - is_equivalent(l, right_unpacked.args[0]) for l in left_middle - ) if not check_mixed(right_unpacked, left_middle): return False @@ -502,35 +557,48 @@ def check_mixed( if not is_equivalent(left_t, right_t): return False + assert len(left_mprefix) == len(right_mprefix) + assert len(left_msuffix) == len(right_msuffix) + + for left_item, right_item in zip( + left_mprefix + left_msuffix, right_mprefix + right_msuffix + ): + if not is_equivalent(left_item, right_item): + return False + left_items = t.args[: right.type.type_var_tuple_prefix] right_items = right.args[: right.type.type_var_tuple_prefix] if right.type.type_var_tuple_suffix: left_items += t.args[-right.type.type_var_tuple_suffix :] right_items += right.args[-right.type.type_var_tuple_suffix :] - unpack_index = right.type.type_var_tuple_prefix assert unpack_index is not None type_params = zip( - left_prefix + right_suffix, + left_prefix + left_suffix, right_prefix + right_suffix, right.type.defn.type_vars[:unpack_index] + right.type.defn.type_vars[unpack_index + 1 :], ) else: type_params = zip(t.args, right.args, right.type.defn.type_vars) - for lefta, righta, tvar in type_params: - if isinstance(tvar, TypeVarType): - if not self.check_type_parameter( - lefta, righta, tvar.variance, self.proper_subtype - ): - nominal = False - else: - if not self.check_type_parameter( - lefta, righta, COVARIANT, self.proper_subtype - ): - nominal = False + if not self.subtype_context.ignore_type_params: + for lefta, righta, tvar in type_params: + if isinstance(tvar, TypeVarType): + if not check_type_parameter( + lefta, + righta, + 
tvar.variance, + self.proper_subtype, + self.subtype_context, + ): + nominal = False + else: + if not check_type_parameter( + lefta, righta, COVARIANT, self.proper_subtype, self.subtype_context + ): + nominal = False if nominal: - TypeState.record_subtype_cache_entry(self._subtype_kind, left, right) + type_state.record_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( left, right, proper_subtype=self.proper_subtype @@ -577,6 +645,8 @@ def visit_param_spec(self, left: ParamSpecType) -> bool: and right.flavor == left.flavor ): return True + if isinstance(right, Parameters) and are_trivial_parameters(right): + return True return self._is_subtype(left.upper_bound, self.right) def visit_type_var_tuple(self, left: TypeVarTupleType) -> bool: @@ -591,8 +661,10 @@ def visit_unpack_type(self, left: UnpackType) -> bool: return False def visit_parameters(self, left: Parameters) -> bool: - right = self.right - if isinstance(right, Parameters) or isinstance(right, CallableType): + if isinstance(self.right, Parameters) or isinstance(self.right, CallableType): + right = self.right + if isinstance(right, CallableType): + right = right.with_unpacked_kwargs() return are_parameters_compatible( left, right, @@ -622,12 +694,23 @@ def visit_callable_type(self, left: CallableType) -> bool: elif isinstance(right, Overloaded): return all(self._is_subtype(left, item) for item in right.items) elif isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: - # OK, a callable can implement a protocol with a single `__call__` member. + if right.type.is_protocol and "__call__" in right.type.protocol_members: + # OK, a callable can implement a protocol with a `__call__` member. # TODO: we should probably explicitly exclude self-types in this case. call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True + if right.type.is_protocol and left.is_type_obj(): + ret_type = get_proper_type(left.ret_type) + if isinstance(ret_type, TupleType): + ret_type = mypy.typeops.tuple_fallback(ret_type) + if isinstance(ret_type, Instance) and is_protocol_implementation( + ret_type, right, proper_subtype=self.proper_subtype, class_obj=True + ): return True return self._is_subtype(left.fallback, right) elif isinstance(right, TypeType): @@ -636,7 +719,7 @@ def visit_callable_type(self, left: CallableType) -> bool: elif isinstance(right, Parameters): # this doesn't check return types.... but is needed for is_equivalent return are_parameters_compatible( - left, + left.with_unpacked_kwargs(), right, is_compat=self._is_subtype, ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, @@ -669,9 +752,8 @@ def visit_tuple_type(self, left: TupleType) -> bool: elif isinstance(right, TupleType): if len(left.items) != len(right.items): return False - for l, r in zip(left.items, right.items): - if not self._is_subtype(l, r): - return False + if any(not self._is_subtype(l, r) for l, r in zip(left.items, right.items)): + return False rfallback = mypy.typeops.tuple_fallback(right) if is_named_instance(rfallback, "builtins.tuple"): # No need to verify fallback. 
This is useful since the calculated fallback @@ -680,9 +762,7 @@ def visit_tuple_type(self, left: TupleType) -> bool: # join(Union[int, C], Union[str, C]) == Union[int, str, C]. return True lfallback = mypy.typeops.tuple_fallback(left) - if not self._is_subtype(lfallback, rfallback): - return False - return True + return self._is_subtype(lfallback, rfallback) else: return False @@ -694,6 +774,7 @@ def visit_typeddict_type(self, left: TypedDictType) -> bool: if not left.names_are_wider_than(right): return False for name, l, r in left.zip(right): + # TODO: should we pass on the full subtype_context here and below? if self.proper_subtype: check = is_same_type(l, r) else: @@ -730,12 +811,15 @@ def visit_literal_type(self, left: LiteralType) -> bool: def visit_overloaded(self, left: Overloaded) -> bool: right = self.right if isinstance(right, Instance): - if right.type.is_protocol and right.type.protocol_members == ["__call__"]: + if right.type.is_protocol and "__call__" in right.type.protocol_members: # same as for CallableType call = find_member("__call__", right, left, is_operator=True) assert call is not None if self._is_subtype(left, call): - return True + if len(right.type.protocol_members) == 1: + return True + if is_protocol_implementation(left.fallback, right, skip=["__call__"]): + return True return self._is_subtype(left.fallback, right) elif isinstance(right, CallableType): for item in left.items: @@ -750,9 +834,8 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Ensure each overload in the right side (the supertype) is accounted for. previous_match_left_index = -1 matched_overloads = set() - possible_invalid_overloads = set() - for right_index, right_item in enumerate(right.items): + for right_item in right.items: found_match = False for left_index, left_item in enumerate(left.items): @@ -761,43 +844,36 @@ def visit_overloaded(self, left: Overloaded) -> bool: # Order matters: we need to make sure that the index of # this item is at least the index of the previous one. if subtype_match and previous_match_left_index <= left_index: - if not found_match: - # Update the index of the previous match. - previous_match_left_index = left_index - found_match = True - matched_overloads.add(left_item) - possible_invalid_overloads.discard(left_item) + previous_match_left_index = left_index + found_match = True + matched_overloads.add(left_index) + break else: # If this one overlaps with the supertype in any way, but it wasn't # an exact match, then it's a potential error. 
strict_concat = self.options.strict_concatenate if self.options else True - if is_callable_compatible( - left_item, - right_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, - ) or is_callable_compatible( - right_item, - left_item, - is_compat=self._is_subtype, - ignore_return=True, - ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, - strict_concatenate=strict_concat, + if left_index not in matched_overloads and ( + is_callable_compatible( + left_item, + right_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) + or is_callable_compatible( + right_item, + left_item, + is_compat=self._is_subtype, + ignore_return=True, + ignore_pos_arg_names=self.subtype_context.ignore_pos_arg_names, + strict_concatenate=strict_concat, + ) ): - # If this is an overload that's already been matched, there's no - # problem. - if left_item not in matched_overloads: - possible_invalid_overloads.add(left_item) + return False if not found_match: return False - - if possible_invalid_overloads: - # There were potentially invalid overloads that were never matched to the - # supertype. - return False return True elif isinstance(right, UnboundType): return True @@ -824,6 +900,35 @@ def visit_union_type(self, left: UnionType) -> bool: if not self._is_subtype(item, self.orig_right): return False return True + + elif isinstance(self.right, UnionType): + # prune literals early to avoid nasty quadratic behavior which would otherwise arise when checking + # subtype relationships between slightly different narrowings of an Enum + # we achieve O(N+M) instead of O(N*M) + + fast_check: set[ProperType] = set() + + for item in _flattened(self.right.relevant_items()): + p_item = get_proper_type(item) + if isinstance(p_item, LiteralType): + fast_check.add(p_item) + elif isinstance(p_item, Instance): + if p_item.last_known_value is None: + fast_check.add(p_item) + else: + fast_check.add(p_item.last_known_value) + + for item in left.relevant_items(): + p_item = get_proper_type(item) + if p_item in fast_check: + continue + lit_type = mypy.typeops.simple_literal_type(p_item) + if lit_type in fast_check: + continue + if not self._is_subtype(item, self.orig_right): + return False + return True + return all(self._is_subtype(item, self.orig_right) for item in left.items) def visit_partial_type(self, left: PartialType) -> bool: @@ -844,6 +949,10 @@ def visit_type_type(self, left: TypeType) -> bool: if isinstance(right, TypeType): return self._is_subtype(left.item, right.item) if isinstance(right, CallableType): + if self.proper_subtype and not right.is_type_obj(): + # We can't accept `Type[X]` as a *proper* subtype of Callable[P, X] + # since this will break transitivity of subtyping. + return False # This is unsound, we don't check the __init__ signature. 
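The union hunk above replaces a quadratic member-by-member scan with a hashed fast path: right-hand literals (or last-known values) go into a set, and left-hand items are membership-tested before falling back to a full subtype check. A stand-alone sketch of just that pruning idea, with plain strings standing in for mypy's literal types:

```python
def union_members_covered(left: list[str], right: list[str]) -> bool:
    """Return True if every item on the left also appears on the right.

    Nested loops would cost O(N*M); hashing the right side first makes the
    common all-literals case O(N+M), which is the point of the fast path.
    """
    fast_check = set(right)
    return all(item in fast_check for item in left)


# e.g. two slightly different narrowings of a large enum-like union
left = [f"Color.v{i}" for i in range(0, 500)]
right = [f"Color.v{i}" for i in range(0, 1000)]
assert union_members_covered(left, right)
assert not union_members_covered(["Color.other"], right)
```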
return self._is_subtype(left.item, right.ret_type) if isinstance(right, Instance): @@ -857,6 +966,10 @@ def visit_type_type(self, left: TypeType) -> bool: if isinstance(item, TypeVarType): item = get_proper_type(item.upper_bound) if isinstance(item, Instance): + if right.type.is_protocol and is_protocol_implementation( + item, right, proper_subtype=self.proper_subtype, class_obj=True + ): + return True metaclass = item.type.metaclass_type return metaclass is not None and self._is_subtype(metaclass, right) return False @@ -876,7 +989,11 @@ def pop_on_exit(stack: list[tuple[T, T]], left: T, right: T) -> Iterator[None]: def is_protocol_implementation( - left: Instance, right: Instance, proper_subtype: bool = False + left: Instance, + right: Instance, + proper_subtype: bool = False, + class_obj: bool = False, + skip: list[str] | None = None, ) -> bool: """Check whether 'left' implements the protocol 'right'. @@ -896,10 +1013,13 @@ def f(self) -> A: ... as well. """ assert right.type.is_protocol + if skip is None: + skip = [] # We need to record this check to generate protocol fine-grained dependencies. - TypeState.record_protocol_subtype_check(left.type, right.type) + type_state.record_protocol_subtype_check(left.type, right.type) # nominal subtyping currently ignores '__init__' and '__new__' signatures members_not_to_check = {"__init__", "__new__"} + members_not_to_check.update(skip) # Trivial check that circumvents the bug described in issue 9771: if left.type.is_protocol: members_right = set(right.type.protocol_members) - members_not_to_check @@ -919,7 +1039,23 @@ def f(self) -> A: ... # We always bind self to the subtype. (Similarly to nominal types). supertype = get_proper_type(find_member(member, right, left)) assert supertype is not None - subtype = get_proper_type(find_member(member, left, left)) + if member == "__call__" and class_obj: + # Special case: class objects always have __call__ that is just the constructor. + # TODO: move this helper function to typeops.py? + import mypy.checkmember + + def named_type(fullname: str) -> Instance: + return Instance(left.type.mro[-1], []) + + subtype: ProperType | None = mypy.checkmember.type_object_type( + left.type, named_type + ) + elif member == "__call__" and left.type.is_metaclass(): + # Special case: we want to avoid falling back to metaclass __call__ + # if constructor signature didn't match, this can cause many false negatives. + subtype = None + else: + subtype = get_proper_type(find_member(member, left, left, class_obj=class_obj)) # Useful for debugging: # print(member, 'of', left, 'has type', subtype) # print(member, 'of', right, 'has type', supertype) @@ -946,14 +1082,25 @@ def f(self) -> A: ... if isinstance(subtype, NoneType) and isinstance(supertype, CallableType): # We want __hash__ = None idiom to work even without --strict-optional return False - subflags = get_member_flags(member, left.type) - superflags = get_member_flags(member, right.type) + subflags = get_member_flags(member, left, class_obj=class_obj) + superflags = get_member_flags(member, right) if IS_SETTABLE in superflags: # Check opposite direction for settable attributes. 
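The `class_obj` parameter threaded through `is_protocol_implementation`, `find_member`, and `get_member_flags` above is what allows a class object itself, not just an instance, to be checked against a protocol, with the constructor standing in for `__call__` and only class-level members being considered. A rough, hedged illustration of the intended usage, assuming the feature behaves as the hunks suggest; every name below is invented for the example:

```python
from typing import Protocol


class ConfigFactory(Protocol):
    name: str

    @classmethod
    def create(cls) -> "ConfigFactory": ...


class AppConfig:
    name = "app"  # a class-level attribute, readable through the class object

    @classmethod
    def create(cls) -> "AppConfig":
        return cls()


def bootstrap(factory: ConfigFactory) -> str:
    return factory.create().name


# The class object itself is passed, not an instance of it.
print(bootstrap(AppConfig))
```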
if not is_subtype(supertype, subtype): return False - if (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): - return False + if not class_obj: + if IS_SETTABLE not in superflags: + if IS_CLASSVAR in superflags and IS_CLASSVAR not in subflags: + return False + elif (IS_CLASSVAR in subflags) != (IS_CLASSVAR in superflags): + return False + else: + if IS_VAR in superflags and IS_CLASSVAR not in subflags: + # Only class variables are allowed for class object access. + return False + if IS_CLASSVAR in superflags: + # This can be never matched by a class object. + return False if IS_SETTABLE in superflags and IS_SETTABLE not in subflags: return False # This rule is copied from nominal check in checker.py @@ -969,12 +1116,12 @@ def f(self) -> A: ... subtype_context=SubtypeContext(ignore_pos_arg_names=ignore_names), proper_subtype=proper_subtype, ) - TypeState.record_subtype_cache_entry(subtype_kind, left, right) + type_state.record_subtype_cache_entry(subtype_kind, left, right) return True def find_member( - name: str, itype: Instance, subtype: Type, is_operator: bool = False + name: str, itype: Instance, subtype: Type, is_operator: bool = False, class_obj: bool = False ) -> Type | None: """Find the type of member by 'name' in 'itype's TypeInfo. @@ -987,23 +1134,25 @@ def find_member( method = info.get_method(name) if method: if isinstance(method, Decorator): - return find_node_type(method.var, itype, subtype) + return find_node_type(method.var, itype, subtype, class_obj=class_obj) if method.is_property: assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) - return find_node_type(dec.var, itype, subtype) - return find_node_type(method, itype, subtype) + return find_node_type(dec.var, itype, subtype, class_obj=class_obj) + return find_node_type(method, itype, subtype, class_obj=class_obj) else: # don't have such method, maybe variable or decorator? node = info.get(name) v = node.node if node else None if isinstance(v, Var): - return find_node_type(v, itype, subtype) + return find_node_type(v, itype, subtype, class_obj=class_obj) if ( not v and name not in ["__getattr__", "__setattr__", "__getattribute__"] and not is_operator + and not class_obj + and itype.extra_attrs is None # skip ModuleType.__getattr__ ): for method_name in ("__getattribute__", "__getattr__"): # Normally, mypy assumes that instances that define __getattr__ have all @@ -1019,12 +1168,18 @@ def find_member( if isinstance(getattr_type, CallableType): return getattr_type.ret_type return getattr_type - if itype.type.fallback_to_any: + if itype.type.fallback_to_any or class_obj and itype.type.meta_fallback_to_any: return AnyType(TypeOfAny.special_form) + if isinstance(v, TypeInfo): + # PEP 544 doesn't specify anything about such use cases. So we just try + # to do something meaningful (at least we should not crash). + return TypeType(fill_typevars_with_any(v)) + if itype.extra_attrs and name in itype.extra_attrs.attrs: + return itype.extra_attrs.attrs[name] return None -def get_member_flags(name: str, info: TypeInfo) -> set[int]: +def get_member_flags(name: str, itype: Instance, class_obj: bool = False) -> set[int]: """Detect whether a member 'name' is settable, whether it is an instance or class variable, and whether it is class or static method. @@ -1035,35 +1190,53 @@ def get_member_flags(name: str, info: TypeInfo) -> set[int]: * IS_CLASS_OR_STATIC: set for methods decorated with @classmethod or with @staticmethod. 
""" + info = itype.type method = info.get_method(name) setattr_meth = info.get_method("__setattr__") if method: if isinstance(method, Decorator): if method.var.is_staticmethod or method.var.is_classmethod: return {IS_CLASS_OR_STATIC} + elif method.var.is_property: + return {IS_VAR} elif method.is_property: # this could be settable property assert isinstance(method, OverloadedFuncDef) dec = method.items[0] assert isinstance(dec, Decorator) if dec.var.is_settable_property or setattr_meth: - return {IS_SETTABLE} - return set() + return {IS_VAR, IS_SETTABLE} + else: + return {IS_VAR} + return set() # Just a regular method node = info.get(name) if not node: if setattr_meth: return {IS_SETTABLE} + if itype.extra_attrs and name in itype.extra_attrs.attrs: + flags = set() + if name not in itype.extra_attrs.immutable: + flags.add(IS_SETTABLE) + return flags return set() v = node.node # just a variable - if isinstance(v, Var) and not v.is_property: - flags = {IS_SETTABLE} + if isinstance(v, Var): + if v.is_property: + return {IS_VAR} + flags = {IS_VAR} + if not v.is_final: + flags.add(IS_SETTABLE) if v.is_classvar: flags.add(IS_CLASSVAR) + if class_obj and v.is_inferred: + flags.add(IS_CLASSVAR) return flags return set() -def find_node_type(node: Var | FuncBase, itype: Instance, subtype: Type) -> Type: +def find_node_type( + node: Var | FuncBase, itype: Instance, subtype: Type, class_obj: bool = False +) -> Type: """Find type of a variable or method 'node' (maybe also a decorated method). Apply type arguments from 'itype', and bind 'self' to 'subtype'. """ @@ -1075,6 +1248,8 @@ def find_node_type(node: Var | FuncBase, itype: Instance, subtype: Type) -> Type ) else: typ = node.type + if typ is not None: + typ = expand_self_type(node, typ, subtype) p_typ = get_proper_type(typ) if typ is None: return AnyType(TypeOfAny.from_error) @@ -1085,10 +1260,16 @@ def find_node_type(node: Var | FuncBase, itype: Instance, subtype: Type) -> Type and not node.is_staticmethod ): assert isinstance(p_typ, FunctionLike) - signature = bind_self( - p_typ, subtype, is_classmethod=isinstance(node, Var) and node.is_classmethod - ) - if node.is_property: + if class_obj and not ( + node.is_class if isinstance(node, FuncBase) else node.is_classmethod + ): + # Don't bind instance methods on class objects. + signature = p_typ + else: + signature = bind_self( + p_typ, subtype, is_classmethod=isinstance(node, Var) and node.is_classmethod + ) + if node.is_property and not class_obj: assert isinstance(signature, CallableType) typ = signature.ret_type else: @@ -1213,6 +1394,10 @@ def g(x: int) -> int: ... If the 'some_check' function is also symmetric, the two calls would be equivalent whether or not we check the args covariantly. """ + # Normalize both types before comparing them. + left = left.with_unpacked_kwargs() + right = right.with_unpacked_kwargs() + if is_compat_return is None: is_compat_return = is_compat @@ -1221,7 +1406,7 @@ def g(x: int) -> int: ... ignore_pos_arg_names = True # Non-type cannot be a subtype of type. - if right.is_type_obj() and not left.is_type_obj(): + if right.is_type_obj() and not left.is_type_obj() and not allow_partial_overlap: return False # A callable L is a subtype of a generic callable R if L is a @@ -1239,8 +1424,7 @@ def g(x: int) -> int: ... 
unified = unify_generic_callable(left, right, ignore_return=ignore_return) if unified is None: return False - else: - left = unified + left = unified # If we allow partial overlaps, we don't need to leave R generic: # if we can find even just a single typevar assignment which @@ -1276,9 +1460,21 @@ def g(x: int) -> int: ... ) +def are_trivial_parameters(param: Parameters | NormalizedCallableType) -> bool: + param_star = param.var_arg() + param_star2 = param.kw_arg() + return ( + param.arg_kinds == [ARG_STAR, ARG_STAR2] + and param_star is not None + and isinstance(get_proper_type(param_star.typ), AnyType) + and param_star2 is not None + and isinstance(get_proper_type(param_star2.typ), AnyType) + ) + + def are_parameters_compatible( - left: Parameters | CallableType, - right: Parameters | CallableType, + left: Parameters | NormalizedCallableType, + right: Parameters | NormalizedCallableType, *, is_compat: Callable[[Type, Type], bool], ignore_pos_arg_names: bool = False, @@ -1296,13 +1492,7 @@ def are_parameters_compatible( right_star2 = right.kw_arg() # Treat "def _(*a: Any, **kw: Any) -> X" similarly to "Callable[..., X]" - if ( - right.arg_kinds == [ARG_STAR, ARG_STAR2] - and right_star - and isinstance(get_proper_type(right_star.typ), AnyType) - and right_star2 - and isinstance(get_proper_type(right_star2.typ), AnyType) - ): + if are_trivial_parameters(right): return True # Match up corresponding arguments and check them for compatibility. In @@ -1499,11 +1689,11 @@ def new_is_compat(left: Type, right: Type) -> bool: def unify_generic_callable( - type: CallableType, - target: CallableType, + type: NormalizedCallableType, + target: NormalizedCallableType, ignore_return: bool, return_constraint_direction: int | None = None, -) -> CallableType | None: +) -> NormalizedCallableType | None: """Try to unify a generic callable type with another callable type. Return unified CallableType if successful; otherwise, return None. @@ -1535,12 +1725,16 @@ def report(*args: Any) -> None: nonlocal had_errors had_errors = True + # This function may be called by the solver, so we need to allow erased types here. + # We anyway allow checking subtyping between other types containing + # (probably also because solver needs subtyping). See also comment in + # ExpandTypeVisitor.visit_erased_type(). applied = mypy.applytype.apply_generic_arguments( - type, non_none_inferred_vars, report, context=target + type, non_none_inferred_vars, report, context=target, allow_erased_callables=True ) if had_errors: return None - return applied + return cast(NormalizedCallableType, applied) def try_restrict_literal_union(t: UnionType, s: Type) -> list[Type] | None: @@ -1564,35 +1758,32 @@ def try_restrict_literal_union(t: UnionType, s: Type) -> list[Type] | None: return new_items -def restrict_subtype_away(t: Type, s: Type, *, ignore_promotions: bool = False) -> Type: +def restrict_subtype_away(t: Type, s: Type) -> Type: """Return t minus s for runtime type assertions. If we can't determine a precise result, return a supertype of the ideal result (just t is a valid result). This is used for type inference of runtime type checks such as - isinstance(). Currently this just removes elements of a union type. + isinstance(). Currently, this just removes elements of a union type. 
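`restrict_subtype_away` and `covers_at_runtime` above drive `isinstance()` narrowing: union members fully covered by the runtime check are dropped from the negative branch, and a special case lets `isinstance(x, dict)` select TypedDicts. A small example of the user-visible effect; the `Movie` TypedDict is illustrative:

```python
from typing import TypedDict, Union


class Movie(TypedDict):
    title: str


def describe(x: Union[Movie, int]) -> str:
    if isinstance(x, dict):
        # A TypedDict is a plain dict at runtime, so this branch keeps Movie...
        return x["title"]
    # ...and the remaining branch is narrowed down to int.
    return str(x + 1)


print(describe({"title": "Dune"}))
print(describe(3))
```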
""" p_t = get_proper_type(t) if isinstance(p_t, UnionType): new_items = try_restrict_literal_union(p_t, s) if new_items is None: new_items = [ - restrict_subtype_away(item, s, ignore_promotions=ignore_promotions) + restrict_subtype_away(item, s) for item in p_t.relevant_items() - if ( - isinstance(get_proper_type(item), AnyType) - or not covers_at_runtime(item, s, ignore_promotions) - ) + if (isinstance(get_proper_type(item), AnyType) or not covers_at_runtime(item, s)) ] return UnionType.make_union(new_items) - elif covers_at_runtime(t, s, ignore_promotions): + elif covers_at_runtime(t, s): return UninhabitedType() else: return t -def covers_at_runtime(item: Type, supertype: Type, ignore_promotions: bool) -> bool: +def covers_at_runtime(item: Type, supertype: Type) -> bool: """Will isinstance(item, supertype) always return True at runtime?""" item = get_proper_type(item) supertype = get_proper_type(supertype) @@ -1600,17 +1791,22 @@ def covers_at_runtime(item: Type, supertype: Type, ignore_promotions: bool) -> b # Since runtime type checks will ignore type arguments, erase the types. supertype = erase_type(supertype) if is_proper_subtype( - erase_type(item), supertype, ignore_promotions=ignore_promotions, erase_instances=True + erase_type(item), supertype, ignore_promotions=True, erase_instances=True ): return True - if isinstance(supertype, Instance) and supertype.type.is_protocol: - # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. - if is_proper_subtype(item, supertype, ignore_promotions=ignore_promotions): - return True - if isinstance(item, TypedDictType) and isinstance(supertype, Instance): - # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). - if supertype.type.fullname == "builtins.dict": - return True + if isinstance(supertype, Instance): + if supertype.type.is_protocol: + # TODO: Implement more robust support for runtime isinstance() checks, see issue #3827. + if is_proper_subtype(item, supertype, ignore_promotions=True): + return True + if isinstance(item, TypedDictType): + # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). + if supertype.type.fullname == "builtins.dict": + return True + elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": + # "int" covers all native int types + if item.type.fullname in MYPYC_NATIVE_INT_NAMES: + return True # TODO: Add more special cases. return False diff --git a/mypy/test/data.py b/mypy/test/data.py index e08b95fedbde..6e2ad198f614 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -81,13 +81,12 @@ def parse_test_case(case: DataDrivenTestCase) -> None: output_files.append((file_entry[0], re.compile(file_entry[1].rstrip(), re.S))) else: output_files.append(file_entry) - elif item.id in ("builtins", "builtins_py2"): + elif item.id == "builtins": # Use an alternative stub file for the builtins module. assert item.arg is not None mpath = join(os.path.dirname(case.file), item.arg) - fnam = "builtins.pyi" if item.id == "builtins" else "__builtin__.pyi" with open(mpath, encoding="utf8") as f: - files.append((join(base_path, fnam), f.read())) + files.append((join(base_path, "builtins.pyi"), f.read())) elif item.id == "typing": # Use an alternative stub file for the typing module. 
assert item.arg is not None @@ -170,9 +169,7 @@ def parse_test_case(case: DataDrivenTestCase) -> None: elif item.id == "triggered" and item.arg is None: triggered = item.data else: - raise ValueError( - f"Invalid section header {item.id} in {case.file} at line {item.line}" - ) + raise ValueError(f"Invalid section header {item.id} in {case.file}:{item.line}") if out_section_missing: raise ValueError(f"{case.file}, line {first_item.line}: Required output section not found") @@ -189,7 +186,7 @@ def parse_test_case(case: DataDrivenTestCase) -> None: ): raise ValueError( ( - "Stale modules after pass {} must be a subset of rechecked " "modules ({}:{})" + "Stale modules after pass {} must be a subset of rechecked modules ({}:{})" ).format(passnum, case.file, first_item.line) ) @@ -677,10 +674,12 @@ class DataFileCollector(pytest.Collector): parent: DataSuiteCollector @classmethod # We have to fight with pytest here: - def from_parent( # type: ignore[override] - cls, parent: DataSuiteCollector, *, name: str + def from_parent( + cls, parent: DataSuiteCollector, *, name: str # type: ignore[override] ) -> DataFileCollector: - return super().from_parent(parent, name=name) + collector = super().from_parent(parent, name=name) + assert isinstance(collector, DataFileCollector) + return collector def collect(self) -> Iterator[DataDrivenTestCase]: yield from split_test_cases( diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index cd3ae4b71071..145027404ff7 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -282,8 +282,14 @@ def num_skipped_suffix_lines(a1: list[str], a2: list[str]) -> int: def testfile_pyversion(path: str) -> tuple[int, int]: - if path.endswith("python310.test"): + if path.endswith("python311.test"): + return 3, 11 + elif path.endswith("python310.test"): return 3, 10 + elif path.endswith("python39.test"): + return 3, 9 + elif path.endswith("python38.test"): + return 3, 8 else: return defaults.PYTHON3_VERSION @@ -369,12 +375,15 @@ def parse_options( if targets: # TODO: support specifying targets via the flags pragma raise RuntimeError("Specifying targets via the flags pragma is not supported.") + if "--show-error-codes" not in flag_list: + options.hide_error_codes = True else: flag_list = [] options = Options() # TODO: Enable strict optional in test cases by default (requires *many* test case changes) options.strict_optional = False options.error_summary = False + options.hide_error_codes = True # Allow custom python version to override testfile_pyversion. 
if all(flag.split("=")[0] not in ["--python-version", "-2", "--py2"] for flag in flag_list): diff --git a/mypy/test/test_find_sources.py b/mypy/test/test_find_sources.py index 97a2ed664454..21ba0903a824 100644 --- a/mypy/test/test_find_sources.py +++ b/mypy/test/test_find_sources.py @@ -356,7 +356,8 @@ def test_find_sources_exclude(self) -> None: "/kg", "/g.py", "/bc", - "/xxx/pkg/a2/b/f.py" "xxx/pkg/a2/b/f.py", + "/xxx/pkg/a2/b/f.py", + "xxx/pkg/a2/b/f.py", ] big_exclude2 = ["|".join(big_exclude1)] for big_exclude in [big_exclude1, big_exclude2]: diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index cae427de2f96..4fe2ee6393c0 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -10,6 +10,7 @@ from mypy.build import Graph from mypy.errors import CompileError from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths +from mypy.options import TYPE_VAR_TUPLE, UNPACK from mypy.semanal_main import core_modules from mypy.test.config import test_data_prefix, test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, module_from_path @@ -26,7 +27,7 @@ ) try: - import lxml # type: ignore + import lxml # type: ignore[import] except ImportError: lxml = None @@ -43,6 +44,8 @@ typecheck_files.remove("check-python39.test") if sys.version_info < (3, 10): typecheck_files.remove("check-python310.test") +if sys.version_info < (3, 11): + typecheck_files.remove("check-python311.test") # Special tests for platforms with case-insensitive filesystems. if sys.platform not in ("darwin", "win32"): @@ -110,7 +113,8 @@ def run_case_once( # Parse options after moving files (in case mypy.ini is being moved). options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True - options.enable_incomplete_features = True + if not testcase.name.endswith("_no_incomplete"): + options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True # Enable some options automatically based on test file name. @@ -119,7 +123,9 @@ def run_case_once( if "columns" in testcase.file: options.show_column_numbers = True if "errorcodes" in testcase.file: - options.show_error_codes = True + options.hide_error_codes = False + if "abstract" not in testcase.file: + options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if incremental_step and options.incremental: # Don't overwrite # flags: --no-incremental in incremental test cases diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 14c985e1d9a9..2e8b0dc9a1cd 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -20,7 +20,7 @@ ) try: - import lxml # type: ignore + import lxml # type: ignore[import] except ImportError: lxml = None @@ -57,6 +57,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: args.append("--show-traceback") if "--error-summary" not in args: args.append("--no-error-summary") + if "--show-error-codes" not in args: + args.append("--hide-error-codes") + if "--disallow-empty-bodies" not in args: + args.append("--allow-empty-bodies") # Type check the program. 
fixed = [python3_path, "-m", "mypy"] env = os.environ.copy() @@ -65,12 +69,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: env["PYTHONPATH"] = PREFIX if os.path.isdir(extra_path): env["PYTHONPATH"] += os.pathsep + extra_path + cwd = os.path.join(test_temp_dir, custom_cwd or "") + args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] process = subprocess.Popen( - fixed + args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - cwd=os.path.join(test_temp_dir, custom_cwd or ""), - env=env, + fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env ) outb, errb = process.communicate() result = process.returncode diff --git a/mypy/test/testconstraints.py b/mypy/test/testconstraints.py index 4aa9fdb83a29..b46f31327150 100644 --- a/mypy/test/testconstraints.py +++ b/mypy/test/testconstraints.py @@ -5,7 +5,7 @@ from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.test.helpers import Suite from mypy.test.typefixture import TypeFixture -from mypy.types import Instance, TypeList, UnpackType +from mypy.types import Instance, TupleType, UnpackType class ConstraintsSuite(Suite): @@ -19,7 +19,7 @@ def test_basic_type_variable(self) -> None: fx = self.fx for direction in [SUBTYPE_OF, SUPERTYPE_OF]: assert infer_constraints(fx.gt, fx.ga, direction) == [ - Constraint(type_var=fx.t.id, op=direction, target=fx.a) + Constraint(type_var=fx.t, op=direction, target=fx.a) ] @pytest.mark.xfail @@ -27,13 +27,19 @@ def test_basic_type_var_tuple_subtype(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF - ) == [Constraint(type_var=fx.ts.id, op=SUBTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) + ] def test_basic_type_var_tuple(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF - ) == [Constraint(type_var=fx.ts.id, op=SUPERTYPE_OF, target=TypeList([fx.a, fx.b]))] + ) == [ + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) + ) + ] def test_type_var_tuple_with_prefix_and_suffix(self) -> None: fx = self.fx @@ -44,7 +50,112 @@ def test_type_var_tuple_with_prefix_and_suffix(self) -> None: SUPERTYPE_OF, ) ) == { - Constraint(type_var=fx.t.id, op=SUPERTYPE_OF, target=fx.a), - Constraint(type_var=fx.ts.id, op=SUPERTYPE_OF, target=TypeList([fx.b, fx.c])), - Constraint(type_var=fx.s.id, op=SUPERTYPE_OF, target=fx.d), + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint( + type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) + ), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_homogenous_tuple(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gvi, [UnpackType(Instance(fx.std_tuplei, [fx.t]))]), + Instance(fx.gvi, [fx.a, fx.b]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), } + + def test_unpack_homogenous_tuple_with_prefix_and_suffix(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance(fx.gv2i, [fx.t, UnpackType(Instance(fx.std_tuplei, [fx.s])), fx.u]), + Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + 
Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_tuple(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance( + fx.gvi, + [ + UnpackType( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) + ) + ], + ), + Instance(fx.gvi, [fx.a, fx.b]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b), + } + + def test_unpack_with_prefix_and_suffix(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance( + fx.gv2i, + [ + fx.u, + UnpackType( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) + ), + fx.u, + ], + ), + Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), + SUPERTYPE_OF, + ) + ) == { + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), + Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_unpack_tuple_length_non_match(self) -> None: + fx = self.fx + assert set( + infer_constraints( + Instance( + fx.gv2i, + [ + fx.u, + UnpackType( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])) + ), + fx.u, + ], + ), + Instance(fx.gv2i, [fx.a, fx.b, fx.d]), + SUPERTYPE_OF, + ) + # We still get constraints on the prefix/suffix in this case. + ) == { + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), + Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), + } + + def test_var_length_tuple_with_fixed_length_tuple(self) -> None: + fx = self.fx + assert not infer_constraints( + TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), + Instance(fx.std_tuplei, [fx.a]), + SUPERTYPE_OF, + ) diff --git a/mypy/test/testdaemon.py b/mypy/test/testdaemon.py index 04a9c387b68a..e3cdf44d89f2 100644 --- a/mypy/test/testdaemon.py +++ b/mypy/test/testdaemon.py @@ -81,6 +81,8 @@ def parse_script(input: list[str]) -> list[list[str]]: def run_cmd(input: str) -> tuple[int, str]: + if input[1:].startswith("mypy run --") and "--show-error-codes" not in input: + input += " --hide-error-codes" if input.startswith("dmypy "): input = sys.executable + " -m mypy." 
+ input if input.startswith("mypy "): diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index 7cbe619bad09..3343762cfaaf 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -4,7 +4,6 @@ import os from collections import defaultdict -from typing import DefaultDict from mypy import build from mypy.errors import CompileError @@ -16,7 +15,7 @@ from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options from mypy.types import Type -from mypy.typestate import TypeState +from mypy.typestate import type_state # Only dependencies in these modules are dumped dumped_modules = ["__main__", "pkg", "pkg.mod"] @@ -34,13 +33,14 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: options.cache_dir = os.devnull options.export_types = True options.preserve_asts = True + options.allow_empty_bodies = True messages, files, type_map = self.build(src, options) a = messages if files is None or type_map is None: if not a: a = ["Unknown compile error (likely syntax error in test case or fixture)"] else: - deps: DefaultDict[str, set[str]] = defaultdict(set) + deps: defaultdict[str, set[str]] = defaultdict(set) for module in files: if ( module in dumped_modules @@ -54,7 +54,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: for source in new_deps: deps[source].update(new_deps[source]) - TypeState.add_all_protocol_deps(deps) + type_state.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): if source.startswith((" tuple[list[str], dict[str, Myp options.show_traceback = True options.cache_dir = os.devnull options.python_version = PYTHON3_VERSION + options.allow_empty_bodies = True try: result = build.build( sources=[BuildSource("main", None, source)], diff --git a/mypy/test/testerrorstream.py b/mypy/test/testerrorstream.py index bae26b148a79..4b98f10fc9ca 100644 --- a/mypy/test/testerrorstream.py +++ b/mypy/test/testerrorstream.py @@ -25,6 +25,7 @@ def test_error_stream(testcase: DataDrivenTestCase) -> None: """ options = Options() options.show_traceback = True + options.hide_error_codes = True logged_messages: list[str] = [] diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index 1cc8ba6198d1..b19c49bf60bc 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -17,6 +17,7 @@ import os import re import sys +import unittest from typing import Any, cast import pytest @@ -28,8 +29,9 @@ from mypy.errors import CompileError from mypy.find_sources import create_source_list from mypy.modulefinder import BuildSource -from mypy.options import Options +from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options from mypy.server.mergecheck import check_consistency +from mypy.server.update import sort_messages_preserving_file_order from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite, DeleteFile, UpdateFile from mypy.test.helpers import ( @@ -151,6 +153,9 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo options.use_fine_grained_cache = self.use_cache and not build_cache options.cache_fine_grained = self.use_cache options.local_partial_types = True + options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] + # Treat empty bodies safely for these test cases. 
+ options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if re.search("flags:.*--follow-imports", source) is None: # Override the default for follow_imports options.follow_imports = "error" @@ -366,3 +371,70 @@ def get_inspect(self, program_text: str, incremental_step: int) -> list[tuple[st def normalize_messages(messages: list[str]) -> list[str]: return [re.sub("^tmp" + re.escape(os.sep), "", message) for message in messages] + + +class TestMessageSorting(unittest.TestCase): + def test_simple_sorting(self) -> None: + msgs = ['x.py:1: error: "int" not callable', 'foo/y.py:123: note: "X" not defined'] + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order(msgs, old_msgs) == list(reversed(msgs)) + assert sort_messages_preserving_file_order(list(reversed(msgs)), old_msgs) == list( + reversed(msgs) + ) + + def test_long_form_sorting(self) -> None: + # Multi-line errors should be sorted together and not split. + msg1 = [ + 'x.py:1: error: "int" not callable', + "and message continues (x: y)", + " 1()", + " ^~~", + ] + msg2 = [ + 'foo/y.py: In function "f":', + 'foo/y.py:123: note: "X" not defined', + "and again message continues", + ] + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order(msg1 + msg2, old_msgs) == msg2 + msg1 + assert sort_messages_preserving_file_order(msg2 + msg1, old_msgs) == msg2 + msg1 + + def test_mypy_error_prefix(self) -> None: + # Some errors don't have a file and start with "mypy: ". These + # shouldn't be sorted together with file-specific errors. + msg1 = 'x.py:1: error: "int" not callable' + msg2 = 'foo/y:123: note: "X" not defined' + msg3 = "mypy: Error not associated with a file" + old_msgs = [ + "mypy: Something wrong", + 'foo/y:12: note: "Y" not defined', + 'x.py:8: error: "str" not callable', + ] + assert sort_messages_preserving_file_order([msg1, msg2, msg3], old_msgs) == [ + msg2, + msg1, + msg3, + ] + assert sort_messages_preserving_file_order([msg3, msg2, msg1], old_msgs) == [ + msg2, + msg1, + msg3, + ] + + def test_new_file_at_the_end(self) -> None: + msg1 = 'x.py:1: error: "int" not callable' + msg2 = 'foo/y.py:123: note: "X" not defined' + new1 = "ab.py:3: error: Problem: error" + new2 = "aaa:3: error: Bad" + old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] + assert sort_messages_preserving_file_order([msg1, msg2, new1], old_msgs) == [ + msg2, + msg1, + new1, + ] + assert sort_messages_preserving_file_order([new1, msg1, msg2, new2], old_msgs) == [ + msg2, + msg1, + new1, + new2, + ] diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py index cf6d648dba5a..08926c179623 100644 --- a/mypy/test/testinfer.py +++ b/mypy/test/testinfer.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import Tuple - from mypy.argmap import map_actuals_to_formals from mypy.checker import DisjointDict, group_comparison_operands from mypy.literals import Key @@ -46,15 +44,18 @@ def test_too_many_caller_args(self) -> None: def test_tuple_star(self) -> None: any_type = AnyType(TypeOfAny.special_form) - self.assert_vararg_map([ARG_STAR], [ARG_POS], [[0]], self.tuple(any_type)) + self.assert_vararg_map([ARG_STAR], [ARG_POS], [[0]], self.make_tuple(any_type)) self.assert_vararg_map( - [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.tuple(any_type, any_type) + [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.make_tuple(any_type, any_type) ) 
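The `TestMessageSorting` cases above pin down the contract of `sort_messages_preserving_file_order`: regrouped daemon output keeps files in the order they appeared in the previous run, with newly seen files trailing. A deliberately simplified model of that contract, ignoring the multi-line errors and the file-less `mypy:` prefix the real tests also cover; the helper name is invented:

```python
from __future__ import annotations


def sort_by_previous_file_order(new: list[str], old: list[str]) -> list[str]:
    # Messages are grouped by the file prefix before the first ":"; files keep
    # the order in which they appeared in the previous run, and unknown files
    # sort last. sorted() is stable, so messages within a file keep their order.
    def file_of(msg: str) -> str:
        return msg.split(":", 1)[0]

    previous_order: dict[str, int] = {}
    for msg in old:
        previous_order.setdefault(file_of(msg), len(previous_order))

    return sorted(new, key=lambda m: previous_order.get(file_of(m), len(previous_order)))


old = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable']
new = ['x.py:1: error: "int" not callable', 'foo/y.py:123: note: "X" not defined']
assert sort_by_previous_file_order(new, old) == list(reversed(new))
```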
self.assert_vararg_map( - [ARG_STAR], [ARG_POS, ARG_OPT, ARG_OPT], [[0], [0], []], self.tuple(any_type, any_type) + [ARG_STAR], + [ARG_POS, ARG_OPT, ARG_OPT], + [[0], [0], []], + self.make_tuple(any_type, any_type), ) - def tuple(self, *args: Type) -> TupleType: + def make_tuple(self, *args: Type) -> TupleType: return TupleType(list(args), TypeFixture().std_tuple) def test_named_args(self) -> None: @@ -92,7 +93,7 @@ def test_special_cases(self) -> None: def assert_map( self, caller_kinds_: list[ArgKind | str], - callee_kinds_: list[ArgKind | Tuple[ArgKind, str]], + callee_kinds_: list[ArgKind | tuple[ArgKind, str]], expected: list[list[int]], ) -> None: caller_kinds, caller_names = expand_caller_kinds(caller_kinds_) diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index 32586623640d..595aba49d8b7 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -113,6 +113,7 @@ def build(self, source: str, testcase: DataDrivenTestCase) -> BuildResult | None options.use_builtins_fixtures = True options.export_types = True options.show_traceback = True + options.allow_empty_bodies = True main_path = os.path.join(test_temp_dir, "main") with open(main_path, "w", encoding="utf8") as f: f.write(source) diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py index f8990897d072..6a2d1e145251 100644 --- a/mypy/test/testparse.py +++ b/mypy/test/testparse.py @@ -32,6 +32,7 @@ def test_parser(testcase: DataDrivenTestCase) -> None: The argument contains the description of the test case. """ options = Options() + options.hide_error_codes = True if testcase.file.endswith("python310.test"): options.python_version = (3, 10) diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py index e4123bfdff17..1602bae6a51f 100644 --- a/mypy/test/testpep561.py +++ b/mypy/test/testpep561.py @@ -107,7 +107,7 @@ def test_pep561(testcase: DataDrivenTestCase) -> None: f.write(f"{s}\n") cmd_line.append(program) - cmd_line.extend(["--no-error-summary"]) + cmd_line.extend(["--no-error-summary", "--hide-error-codes"]) if python_executable != sys.executable: cmd_line.append(f"--python-executable={python_executable}") diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index a5eaea769515..02dd11655382 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -52,6 +52,8 @@ def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None "--no-strict-optional", "--no-silence-site-packages", "--no-error-summary", + "--hide-error-codes", + "--allow-empty-bodies", ] interpreter = python3_path mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") @@ -79,7 +81,11 @@ def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None # Normalize paths so that the output is the same on Windows and Linux/macOS. line = line.replace(test_temp_dir + os.sep, test_temp_dir + "/") output.append(line.rstrip("\r\n")) - if returncode == 0: + if returncode > 1 and not testcase.output: + # Either api.run() doesn't work well in case of a crash, or pytest interferes with it. + # Tweak output to prevent tests with empty expected output to pass in case of a crash. + output.append("!!! Mypy crashed !!!") + if returncode == 0 and not output: # Execute the program. 
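The Python-evaluation harness above now refuses to let a crash masquerade as a clean run: an exit status above 1 combined with empty output injects a marker line, so a test whose expected output is empty cannot pass silently. A hedged sketch of the same guard around a subprocess invocation; `program.py` and the flag selection are placeholders:

```python
import subprocess
import sys

# Exit status 0 means a clean run and 1 means type errors were reported;
# anything above that is treated here as an abnormal termination.
proc = subprocess.run(
    [sys.executable, "-m", "mypy", "--no-error-summary", "program.py"],
    capture_output=True,
    text=True,
)
output = proc.stdout.rstrip().splitlines()
if proc.returncode > 1 and not output:
    output.append("!!! Mypy crashed !!!")
print(proc.returncode, output)
```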
proc = subprocess.run( [interpreter, "-Wignore", program], cwd=test_temp_dir, capture_output=True diff --git a/mypy/test/testreports.py b/mypy/test/testreports.py index 28c4ae5638a0..a422b4bb2a7b 100644 --- a/mypy/test/testreports.py +++ b/mypy/test/testreports.py @@ -7,7 +7,7 @@ from mypy.test.helpers import Suite, assert_equal try: - import lxml # type: ignore + import lxml # type: ignore[import] except ImportError: lxml = None @@ -22,7 +22,7 @@ def test_get_line_rate(self) -> None: @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_as_xml(self) -> None: - import lxml.etree as etree # type: ignore + import lxml.etree as etree # type: ignore[import] cobertura_package = CoberturaPackage("foobar") cobertura_package.covered_lines = 21 diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py index 4f1e9d8460dd..71ebc43df8c2 100644 --- a/mypy/test/testsemanal.py +++ b/mypy/test/testsemanal.py @@ -11,7 +11,7 @@ from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import TypeInfo -from mypy.options import Options +from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import ( @@ -46,7 +46,7 @@ def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Opti options.semantic_analysis_only = True options.show_traceback = True options.python_version = PYTHON3_VERSION - options.enable_incomplete_features = True + options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] return options @@ -202,7 +202,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: for f in result.files.values(): for n in f.names.values(): if isinstance(n.node, TypeInfo): - assert n.fullname is not None + assert n.fullname typeinfos[n.fullname] = n.node # The output is the symbol table converted into a string. 
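Several suites above switch from the old boolean to `enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]`, opting in to the still-incomplete variadic generics support. The user-level counterpart would be something like `--enable-incomplete-feature=TypeVarTuple --enable-incomplete-feature=Unpack` (flag spelling per the docs of that release; treat it as an assumption here), applied to code along these lines, which needs `typing_extensions` at runtime and whose acceptance depended on how far the support had progressed:

```python
from __future__ import annotations

from typing import Tuple

from typing_extensions import TypeVarTuple, Unpack

Ts = TypeVarTuple("Ts")


def with_first(first: int, *rest: Unpack[Ts]) -> Tuple[int, Unpack[Ts]]:
    # Prepend a fixed element to an arbitrary tail; the element types of the
    # tail are tracked by the TypeVarTuple once the feature is enabled.
    return (first, *rest)


print(with_first(1, "a", 2.0))
```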
@@ -220,7 +220,7 @@ class TypeInfoMap(Dict[str, TypeInfo]): def __str__(self) -> str: a: list[str] = ["TypeInfoMap("] for x, y in sorted(self.items()): - if isinstance(x, str) and ( + if ( not x.startswith("builtins.") and not x.startswith("typing.") and not x.startswith("abc.") diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py index 6ff328d050b3..d6c585ef4aaa 100644 --- a/mypy/test/testsolve.py +++ b/mypy/test/testsolve.py @@ -138,7 +138,7 @@ def assert_solve( assert_equal(str(actual), str(res)) def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: - return Constraint(type_var.id, SUPERTYPE_OF, bound) + return Constraint(type_var, SUPERTYPE_OF, bound) def subc(self, type_var: TypeVarType, bound: Type) -> Constraint: - return Constraint(type_var.id, SUBTYPE_OF, bound) + return Constraint(type_var, SUBTYPE_OF, bound) diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py index 7e3993252b6c..c7b576f89389 100644 --- a/mypy/test/teststubgen.py +++ b/mypy/test/teststubgen.py @@ -28,6 +28,7 @@ Options, collect_build_targets, generate_stubs, + get_sig_generators, is_blacklisted_path, is_non_library_module, mypy_options, @@ -803,7 +804,14 @@ def test_generate_c_type_stub_no_crash_for_object(self) -> None: output: list[str] = [] mod = ModuleType("module", "") # any module is fine imports: list[str] = [] - generate_c_type_stub(mod, "alias", object, output, imports) + generate_c_type_stub( + mod, + "alias", + object, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(imports, []) assert_equal(output[0], "class alias:") @@ -815,7 +823,14 @@ class TestClassVariableCls: output: list[str] = [] imports: list[str] = [] mod = ModuleType("module", "") # any module is fine - generate_c_type_stub(mod, "C", TestClassVariableCls, output, imports) + generate_c_type_stub( + mod, + "C", + TestClassVariableCls, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(imports, []) assert_equal(output, ["class C:", " x: ClassVar[int] = ..."]) @@ -826,7 +841,14 @@ class TestClass(KeyError): output: list[str] = [] imports: list[str] = [] mod = ModuleType("module, ") - generate_c_type_stub(mod, "C", TestClass, output, imports) + generate_c_type_stub( + mod, + "C", + TestClass, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["class C(KeyError): ..."]) assert_equal(imports, []) @@ -834,7 +856,14 @@ def test_generate_c_type_inheritance_same_module(self) -> None: output: list[str] = [] imports: list[str] = [] mod = ModuleType(TestBaseClass.__module__, "") - generate_c_type_stub(mod, "C", TestClass, output, imports) + generate_c_type_stub( + mod, + "C", + TestClass, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["class C(TestBaseClass): ..."]) assert_equal(imports, []) @@ -847,7 +876,14 @@ class TestClass(argparse.Action): output: list[str] = [] imports: list[str] = [] mod = ModuleType("module", "") - generate_c_type_stub(mod, "C", TestClass, output, imports) + generate_c_type_stub( + mod, + "C", + TestClass, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["class C(argparse.Action): ..."]) assert_equal(imports, ["import argparse"]) @@ -858,7 +894,14 @@ class TestClass(type): output: list[str] = [] imports: list[str] = [] mod = ModuleType("module", "") - generate_c_type_stub(mod, "C", TestClass, output, imports) + generate_c_type_stub( + mod, + "C", + 
TestClass, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["class C(type): ..."]) assert_equal(imports, []) @@ -873,7 +916,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) assert_equal(imports, []) @@ -889,7 +939,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) assert_equal(imports, []) @@ -904,11 +961,54 @@ def test(cls, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="cls", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="cls", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) - assert_equal(output, ["def test(cls, *args, **kwargs) -> Any: ..."]) + assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs) -> Any: ..."]) assert_equal(imports, []) + def test_generate_c_type_classmethod_with_overloads(self) -> None: + class TestClass: + @classmethod + def test(self, arg0: str) -> None: + """ + test(cls, arg0: str) + test(cls, arg0: int) + """ + pass + + output: list[str] = [] + imports: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + generate_c_function_stub( + mod, + "test", + TestClass.test, + output, + imports, + self_var="cls", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), + ) + assert_equal( + output, + [ + "@overload", + "@classmethod", + "def test(cls, arg0: str) -> Any: ...", + "@overload", + "@classmethod", + "def test(cls, arg0: int) -> Any: ...", + ], + ) + assert_equal(imports, ["from typing import overload"]) + def test_generate_c_type_with_docstring_empty_default(self) -> None: class TestClass: def test(self, arg0: str = "") -> None: @@ -920,7 +1020,14 @@ def test(self, arg0: str = "") -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: str = ...) 
-> Any: ..."]) assert_equal(imports, []) @@ -937,7 +1044,14 @@ def test(arg0: str) -> None: output: list[str] = [] imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub(mod, "test", test, output, imports) + generate_c_function_stub( + mod, + "test", + test, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."]) assert_equal(imports, ["import argparse"]) @@ -955,7 +1069,14 @@ def test(arg0: str) -> None: output: list[str] = [] imports: list[str] = [] mod = ModuleType("argparse", "") - generate_c_function_stub(mod, "test", test, output, imports) + generate_c_function_stub( + mod, + "test", + test, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["def test(arg0: Action) -> Any: ..."]) assert_equal(imports, []) @@ -970,7 +1091,14 @@ def test(arg0: str) -> None: output: list[str] = [] imports: list[str] = [] mod = ModuleType(self.__module__, "") - generate_c_function_stub(mod, "test", test, output, imports) + generate_c_function_stub( + mod, + "test", + test, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["def test(arg0: str) -> argparse.Action: ..."]) assert_equal(imports, ["import argparse"]) @@ -987,7 +1115,14 @@ def test(arg0: str) -> None: output: list[str] = [] imports: list[str] = [] mod = ModuleType("argparse", "") - generate_c_function_stub(mod, "test", test, output, imports) + generate_c_function_stub( + mod, + "test", + test, + output, + imports, + sig_generators=get_sig_generators(parse_options([])), + ) assert_equal(output, ["def test(arg0: str) -> Action: ..."]) assert_equal(imports, []) @@ -1052,7 +1187,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."]) assert_equal(imports, []) @@ -1068,7 +1210,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,int]) -> Any: ..."]) assert_equal(imports, []) @@ -1084,7 +1233,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,List[int]]) -> Any: ..."]) assert_equal(imports, []) @@ -1100,7 +1256,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, 
+ self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[argparse.Action,int]) -> Any: ..."]) assert_equal(imports, ["import argparse"]) @@ -1116,7 +1279,14 @@ def test(self, arg0: str) -> None: imports: list[str] = [] mod = ModuleType(TestClass.__module__, "") generate_c_function_stub( - mod, "test", TestClass.test, output, imports, self_var="self", class_name="TestClass" + mod, + "test", + TestClass.test, + output, + imports, + self_var="self", + class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,argparse.Action]) -> Any: ..."]) assert_equal(imports, ["import argparse"]) @@ -1144,6 +1314,7 @@ def __init__(self, arg0: str) -> None: imports, self_var="self", class_name="TestClass", + sig_generators=get_sig_generators(parse_options([])), ) assert_equal( output, @@ -1153,7 +1324,7 @@ def __init__(self, arg0: str) -> None: "@overload", "def __init__(self, arg0: str, arg1: str) -> None: ...", "@overload", - "def __init__(*args, **kwargs) -> Any: ...", + "def __init__(self, *args, **kwargs) -> Any: ...", ], ) assert_equal(set(imports), {"from typing import overload"}) diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index f15650811dc5..6bb4dfb2c937 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -302,7 +302,7 @@ def test_arg_kind(self) -> Iterator[Case]: ) @collect_cases - def test_default_value(self) -> Iterator[Case]: + def test_default_presence(self) -> Iterator[Case]: yield Case( stub="def f1(text: str = ...) -> None: ...", runtime="def f1(text = 'asdf'): pass", @@ -336,6 +336,59 @@ def f6(text: _T = ...) -> None: ... error="f6", ) + @collect_cases + def test_default_value(self) -> Iterator[Case]: + yield Case( + stub="def f1(text: str = 'x') -> None: ...", + runtime="def f1(text = 'y'): pass", + error="f1", + ) + yield Case( + stub='def f2(text: bytes = b"x\'") -> None: ...', + runtime='def f2(text = b"x\'"): pass', + error=None, + ) + yield Case( + stub='def f3(text: bytes = b"y\'") -> None: ...', + runtime='def f3(text = b"x\'"): pass', + error="f3", + ) + yield Case( + stub="def f4(text: object = 1) -> None: ...", + runtime="def f4(text = 1.0): pass", + error="f4", + ) + yield Case( + stub="def f5(text: object = True) -> None: ...", + runtime="def f5(text = 1): pass", + error="f5", + ) + yield Case( + stub="def f6(text: object = True) -> None: ...", + runtime="def f6(text = True): pass", + error=None, + ) + yield Case( + stub="def f7(text: object = not True) -> None: ...", + runtime="def f7(text = False): pass", + error=None, + ) + yield Case( + stub="def f8(text: object = not True) -> None: ...", + runtime="def f8(text = True): pass", + error="f8", + ) + yield Case( + stub="def f9(text: object = {1: 2}) -> None: ...", + runtime="def f9(text = {1: 3}): pass", + error="f9", + ) + yield Case( + stub="def f10(text: object = [1, 2]) -> None: ...", + runtime="def f10(text = [1, 2]): pass", + error=None, + ) + @collect_cases def test_static_class_method(self) -> Iterator[Case]: yield Case( @@ -975,7 +1028,7 @@ def test_all_in_stub_not_at_runtime(self) -> Iterator[Case]: @collect_cases def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: - # We *should* emit an error with the module name itself, + # We *should* emit an error with the module name itself + __all__, # if the stub *does* define __all__, # but the stub's __all__ is inconsistent with the 
runtime's __all__ yield Case( @@ -987,7 +1040,7 @@ def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: __all__ = [] foo = 'foo' """, - error="", + error="__all__", ) @collect_cases @@ -1029,6 +1082,9 @@ def test_missing_no_runtime_all(self) -> Iterator[Case]: yield Case(stub="", runtime="import sys", error=None) yield Case(stub="", runtime="def g(): ...", error="g") yield Case(stub="", runtime="CONSTANT = 0", error="CONSTANT") + yield Case(stub="", runtime="import re; constant = re.compile('foo')", error="constant") + yield Case(stub="", runtime="from json.scanner import NUMBER_RE", error=None) + yield Case(stub="", runtime="from string import ascii_letters", error=None) @collect_cases def test_non_public_1(self) -> Iterator[Case]: @@ -1098,6 +1154,49 @@ def __mangle_bad(self, text): pass """, error="X.__mangle_bad", ) + yield Case( + stub=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class Klass: + class __Mangled1: + class __Mangled2: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="Klass.__Mangled1.__Mangled2.__mangle_bad", + ) + yield Case( + stub=""" + class __Dunder__: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class __Dunder__: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="__Dunder__.__mangle_bad", + ) + yield Case( + stub=""" + class _Private: + def __mangle_good(self, text: str) -> None: ... + def __mangle_bad(self, number: int) -> None: ... + """, + runtime=""" + class _Private: + def __mangle_good(self, text): pass + def __mangle_bad(self, text): pass + """, + error="_Private.__mangle_bad", + ) @collect_cases def test_mro(self) -> Iterator[Case]: @@ -1271,6 +1370,69 @@ def test_type_var(self) -> Iterator[Case]: ) yield Case(stub="C = ParamSpec('C')", runtime="C = ParamSpec('C')", error=None) + @collect_cases + def test_metaclass_match(self) -> Iterator[Case]: + yield Case(stub="class Meta(type): ...", runtime="class Meta(type): ...", error=None) + yield Case(stub="class A0: ...", runtime="class A0: ...", error=None) + yield Case( + stub="class A1(metaclass=Meta): ...", + runtime="class A1(metaclass=Meta): ...", + error=None, + ) + yield Case(stub="class A2: ...", runtime="class A2(metaclass=Meta): ...", error="A2") + yield Case(stub="class A3(metaclass=Meta): ...", runtime="class A3: ...", error="A3") + + # Explicit `type` metaclass can always be added in any part: + yield Case( + stub="class T1(metaclass=type): ...", + runtime="class T1(metaclass=type): ...", + error=None, + ) + yield Case(stub="class T2: ...", runtime="class T2(metaclass=type): ...", error=None) + yield Case(stub="class T3(metaclass=type): ...", runtime="class T3: ...", error=None) + + # Explicit check that `_protected` names are also supported: + yield Case(stub="class _P1(type): ...", runtime="class _P1(type): ...", error=None) + yield Case(stub="class P2: ...", runtime="class P2(metaclass=_P1): ...", error="P2") + + # With inheritance: + yield Case( + stub=""" + class I1(metaclass=Meta): ... + class S1(I1): ... + """, + runtime=""" + class I1(metaclass=Meta): ... + class S1(I1): ... + """, + error=None, + ) + yield Case( + stub=""" + class I2(metaclass=Meta): ... + class S2: ... # missing inheritance + """, + runtime=""" + class I2(metaclass=Meta): ... + class S2(I2): ... 
+ """, + error="S2", + ) + + @collect_cases + def test_metaclass_abcmeta(self) -> Iterator[Case]: + # Handling abstract metaclasses is special: + yield Case(stub="from abc import ABCMeta", runtime="from abc import ABCMeta", error=None) + yield Case( + stub="class A1(metaclass=ABCMeta): ...", + runtime="class A1(metaclass=ABCMeta): ...", + error=None, + ) + # Stubs cannot miss abstract metaclass: + yield Case(stub="class A2: ...", runtime="class A2(metaclass=ABCMeta): ...", error="A2") + # But, stubs can add extra abstract metaclass, this might be a typing hack: + yield Case(stub="class A3(metaclass=ABCMeta): ...", runtime="class A3: ...", error=None) + @collect_cases def test_abstract_methods(self) -> Iterator[Case]: yield Case( @@ -1319,6 +1481,7 @@ def some(self) -> None: ... @collect_cases def test_abstract_properties(self) -> Iterator[Case]: + # TODO: test abstract properties with setters yield Case( stub="from abc import abstractmethod", runtime="from abc import abstractmethod", @@ -1328,6 +1491,7 @@ def test_abstract_properties(self) -> Iterator[Case]: yield Case( stub=""" class AP1: + @property def some(self) -> int: ... """, runtime=""" @@ -1338,6 +1502,19 @@ def some(self) -> int: ... """, error="AP1.some", ) + yield Case( + stub=""" + class AP1_2: + def some(self) -> int: ... # missing `@property` decorator + """, + runtime=""" + class AP1_2: + @property + @abstractmethod + def some(self) -> int: ... + """, + error="AP1_2.some", + ) yield Case( stub=""" class AP2: @@ -1384,9 +1561,9 @@ def test_output(self) -> None: expected = ( f'error: {TEST_MODULE_NAME}.bad is inconsistent, stub argument "number" differs ' 'from runtime argument "num"\n' - f"Stub: at line 1 in file {TEST_MODULE_NAME}.pyi\n" + f"Stub: in file {TEST_MODULE_NAME}.pyi:1\n" "def (number: builtins.int, text: builtins.str)\n" - f"Runtime: at line 1 in file {TEST_MODULE_NAME}.py\ndef (num, text)\n\n" + f"Runtime: in file {TEST_MODULE_NAME}.py:1\ndef (num, text)\n\n" "Found 1 error (checked 1 module)\n" ) assert remove_color_code(output) == expected @@ -1481,13 +1658,13 @@ def test_mypy_build(self) -> None: output = run_stubtest(stub="+", runtime="", options=[]) assert remove_color_code(output) == ( "error: not checking stubs due to failed mypy compile:\n{}.pyi:1: " - "error: invalid syntax\n".format(TEST_MODULE_NAME) + "error: invalid syntax [syntax]\n".format(TEST_MODULE_NAME) ) output = run_stubtest(stub="def f(): ...\ndef f(): ...", runtime="", options=[]) assert remove_color_code(output) == ( "error: not checking stubs due to mypy build errors:\n{}.pyi:2: " - 'error: Name "f" already defined on line 1\n'.format(TEST_MODULE_NAME) + 'error: Name "f" already defined on line 1 [no-redef]\n'.format(TEST_MODULE_NAME) ) def test_missing_stubs(self) -> None: @@ -1544,7 +1721,7 @@ def test_config_file(self) -> None: output = run_stubtest(stub=stub, runtime=runtime, options=[]) assert remove_color_code(output) == ( f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n" - f"Stub: at line 2 in file {TEST_MODULE_NAME}.pyi\n_decimal.Decimal\nRuntime:\n5\n\n" + f"Stub: in file {TEST_MODULE_NAME}.pyi:2\n_decimal.Decimal\nRuntime:\n5\n\n" "Found 1 error (checked 1 module)\n" ) output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) diff --git a/mypy/test/testsubtypes.py b/mypy/test/testsubtypes.py index 22f48a88e879..c76a34ff00d7 100644 --- a/mypy/test/testsubtypes.py +++ b/mypy/test/testsubtypes.py @@ -273,6 +273,22 @@ def test_type_var_tuple_with_prefix_suffix(self) -> 
None: Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss), self.fx.b, self.fx.c]), ) + def test_type_var_tuple_unpacked_varlength_tuple(self) -> None: + self.assert_subtype( + Instance( + self.fx.gvi, + [ + UnpackType( + TupleType( + [self.fx.a, self.fx.b], + fallback=Instance(self.fx.std_tuplei, [self.fx.o]), + ) + ) + ], + ), + Instance(self.fx.gvi, [self.fx.a, self.fx.b]), + ) + def test_type_var_tuple_unpacked_tuple(self) -> None: self.assert_subtype( Instance( @@ -333,7 +349,7 @@ def test_type_var_tuple_unpacked_tuple(self) -> None: ) def test_type_var_tuple_unpacked_variable_length_tuple(self) -> None: - self.assert_strict_subtype( + self.assert_equivalent( Instance(self.fx.gvi, [self.fx.a, self.fx.a]), Instance(self.fx.gvi, [UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))]), ) diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py index 179b2f528b1e..1d3d4468444e 100644 --- a/mypy/test/testtransform.py +++ b/mypy/test/testtransform.py @@ -7,6 +7,7 @@ from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource +from mypy.options import TYPE_VAR_TUPLE, UNPACK from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options @@ -39,7 +40,7 @@ def test_transform(testcase: DataDrivenTestCase) -> None: options = parse_options(src, testcase, 1) options.use_builtins_fixtures = True options.semantic_analysis_only = True - options.enable_incomplete_features = True + options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py index 48e1695d0278..22ef4272e933 100644 --- a/mypy/test/testtypegen.py +++ b/mypy/test/testtypegen.py @@ -7,7 +7,7 @@ from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource -from mypy.nodes import NameExpr +from mypy.nodes import NameExpr, TempNode from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite @@ -34,6 +34,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: options.show_traceback = True options.export_types = True options.preserve_asts = True + options.allow_empty_bodies = True result = build.build( sources=[BuildSource("main", None, src)], options=options, @@ -53,7 +54,9 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: # Filter nodes that should be included in the output. 
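The testtransform hunk above moves from the old boolean `enable_incomplete_features` to the per-feature list `enable_incomplete_feature`. A minimal sketch of setting it programmatically, using only names that appear in this diff (the `options` variable is illustrative):

```python
from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options

options = Options()
# Opt in to individual incomplete features instead of a single on/off switch.
options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK]
```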
keys = [] for node in nodes: - if node.line is not None and node.line != -1 and map[node]: + if isinstance(node, TempNode): + continue + if node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue if re.match(mask, short_type(node)) or ( diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 31bdd6690a7a..ee0256e2057a 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -31,6 +31,7 @@ UninhabitedType, UnionType, get_proper_type, + has_recursive_types, ) @@ -157,6 +158,13 @@ def test_type_alias_expand_all(self) -> None: [self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a]) ) + def test_recursive_nested_in_non_recursive(self) -> None: + A, _ = self.fx.def_alias_1(self.fx.a) + T = TypeVarType("T", "T", -1, [], self.fx.o) + NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) + assert not NA.is_recursive + assert has_recursive_types(NA) + def test_indirection_no_infinite_recursion(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) visitor = TypeIndirectionVisitor() diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index a78ad6e6f51b..d12e7abab0e2 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -56,9 +56,6 @@ def make_type_var( ) -> TypeVarType: return TypeVarType(name, name, id, values, upper_bound, variance) - def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: - return TypeVarTupleType(name, name, id, upper_bound) - self.t = make_type_var("T", 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var("T", -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var("T", -2, [], self.o, variance) # T`-2 (type variable) @@ -66,10 +63,7 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self.s1 = make_type_var("S", 1, [], self.o, variance) # S`1 (type variable) self.sf = make_type_var("S", -2, [], self.o, variance) # S`-2 (type variable) self.sf1 = make_type_var("S", -1, [], self.o, variance) # S`-1 (type variable) - - self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) - self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) - self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) + self.u = make_type_var("U", 3, [], self.o, variance) # U`3 (type variable) # Simple types self.anyt = AnyType(TypeOfAny.special_form) @@ -132,10 +126,6 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy bases=[Instance(self.gi, [self.s1])], ) - self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) - self.gv2i = self.make_type_info( - "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 - ) # list[T] self.std_listi = self.make_type_info( "builtins.list", mro=[self.oi], typevars=["T"], variances=[variance] @@ -217,6 +207,18 @@ def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleTy self._add_bool_dunder(self.bool_type_info) self._add_bool_dunder(self.ai) + def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: + return TypeVarTupleType(name, name, id, upper_bound, self.std_tuple) + + self.ts = make_type_var_tuple("Ts", 1, self.o) # Ts`1 (type var tuple) + self.ss = make_type_var_tuple("Ss", 2, self.o) # Ss`2 (type var tuple) + self.us = make_type_var_tuple("Us", 3, self.o) # Us`3 (type var tuple) + + self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) + self.gv2i = self.make_type_info( 
+ "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 + ) + def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) @@ -295,7 +297,7 @@ def make_type_info( v: list[TypeVarLikeType] = [] for id, n in enumerate(typevars, 1): if typevar_tuple_index is not None and id - 1 == typevar_tuple_index: - v.append(TypeVarTupleType(n, n, id, self.o)) + v.append(TypeVarTupleType(n, n, id, self.o, self.std_tuple)) else: if variances: variance = variances[id - 1] @@ -338,9 +340,16 @@ def def_alias_2(self, base: Instance) -> tuple[TypeAliasType, Type]: A.alias = AN return A, target - def non_rec_alias(self, target: Type) -> TypeAliasType: - AN = TypeAlias(target, "__main__.A", -1, -1) - return TypeAliasType(AN, []) + def non_rec_alias( + self, + target: Type, + alias_tvars: list[TypeVarLikeType] | None = None, + args: list[Type] | None = None, + ) -> TypeAliasType: + AN = TypeAlias(target, "__main__.A", -1, -1, alias_tvars=alias_tvars) + if args is None: + args = [] + return TypeAliasType(AN, args) class InterfaceTypeFixture(TypeFixture): diff --git a/mypy/traverser.py b/mypy/traverser.py index 3c4f21601b88..038d948522f0 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -253,15 +253,17 @@ def visit_yield_expr(self, o: YieldExpr) -> None: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: + o.callee.accept(self) for a in o.args: a.accept(self) - o.callee.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) + if o.analyzed is not None: + o.analyzed.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: diff --git a/mypy/treetransform.py b/mypy/treetransform.py index ca50afde7556..535f50d5cf5e 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -49,6 +49,7 @@ LambdaExpr, ListComprehension, ListExpr, + MatchStmt, MemberExpr, MypyFile, NamedTupleExpr, @@ -90,6 +91,17 @@ YieldExpr, YieldFromExpr, ) +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) from mypy.traverser import TraverserVisitor from mypy.types import FunctionLike, ProperType, Type from mypy.util import replace_object_state @@ -361,7 +373,7 @@ def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: - return TryStmt( + new = TryStmt( self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), @@ -369,6 +381,8 @@ def visit_try_stmt(self, node: TryStmt) -> TryStmt: self.optional_block(node.else_body), self.optional_block(node.finally_body), ) + new.is_star = node.is_star + return new def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt( @@ -381,6 +395,52 @@ def visit_with_stmt(self, node: WithStmt) -> WithStmt: new.analyzed_types = [self.type(typ) for typ in node.analyzed_types] return new + def visit_as_pattern(self, p: AsPattern) -> AsPattern: + return AsPattern( + pattern=self.pattern(p.pattern) if p.pattern is not None else None, + name=self.duplicate_name(p.name) if p.name is not None else None, + ) + + def visit_or_pattern(self, p: OrPattern) -> OrPattern: + return OrPattern([self.pattern(pat) for pat in p.patterns]) + + 
def visit_value_pattern(self, p: ValuePattern) -> ValuePattern: + return ValuePattern(self.expr(p.expr)) + + def visit_singleton_pattern(self, p: SingletonPattern) -> SingletonPattern: + return SingletonPattern(p.value) + + def visit_sequence_pattern(self, p: SequencePattern) -> SequencePattern: + return SequencePattern([self.pattern(pat) for pat in p.patterns]) + + def visit_starred_pattern(self, p: StarredPattern) -> StarredPattern: + return StarredPattern(self.duplicate_name(p.capture) if p.capture is not None else None) + + def visit_mapping_pattern(self, p: MappingPattern) -> MappingPattern: + return MappingPattern( + keys=[self.expr(expr) for expr in p.keys], + values=[self.pattern(pat) for pat in p.values], + rest=self.duplicate_name(p.rest) if p.rest is not None else None, + ) + + def visit_class_pattern(self, p: ClassPattern) -> ClassPattern: + class_ref = p.class_ref.accept(self) + assert isinstance(class_ref, RefExpr) + return ClassPattern( + class_ref=class_ref, + positionals=[self.pattern(pat) for pat in p.positionals], + keyword_keys=list(p.keyword_keys), + keyword_values=[self.pattern(pat) for pat in p.keyword_values], + ) + + def visit_match_stmt(self, o: MatchStmt) -> MatchStmt: + return MatchStmt( + subject=self.expr(o.subject), + patterns=[self.pattern(p) for p in o.patterns], + guards=self.optional_expressions(o.guards), + bodies=self.blocks(o.bodies), + ) + def visit_star_expr(self, node: StarExpr) -> StarExpr: return StarExpr(node.expr) @@ -459,7 +519,12 @@ def visit_call_expr(self, node: CallExpr) -> CallExpr: ) def visit_op_expr(self, node: OpExpr) -> OpExpr: - new = OpExpr(node.op, self.expr(node.left), self.expr(node.right)) + new = OpExpr( + node.op, + self.expr(node.left), + self.expr(node.right), + cast(Optional[TypeAliasExpr], self.optional_expr(node.analyzed)), + ) new.method_type = self.optional_type(node.method_type) return new @@ -490,7 +555,7 @@ def visit_super_expr(self, node: SuperExpr) -> SuperExpr: return new def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: - return AssignmentExpr(node.target, node.value) + return AssignmentExpr(self.expr(node.target), self.expr(node.value)) def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) @@ -588,7 +653,11 @@ def visit_paramspec_expr(self, node: ParamSpecExpr) -> ParamSpecExpr: def visit_type_var_tuple_expr(self, node: TypeVarTupleExpr) -> TypeVarTupleExpr: return TypeVarTupleExpr( - node.name, node.fullname, self.type(node.upper_bound), variance=node.variance + node.name, + node.fullname, + self.type(node.upper_bound), + node.tuple_fallback, + variance=node.variance, ) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: @@ -637,6 +706,12 @@ def stmt(self, stmt: Statement) -> Statement: new.set_line(stmt) return new + def pattern(self, pattern: Pattern) -> Pattern: + new = pattern.accept(self) + assert isinstance(new, Pattern) + new.set_line(pattern) + return new + # Helpers # # All the node helpers also propagate line numbers. 
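The mypy/traverser.py hunk earlier in this diff makes `visit_call_expr` visit the callee before the arguments. A standalone sketch of that ordering with the stdlib `ast` module (the `CallOrder` class is illustrative, not mypy's visitor):

```python
import ast


class CallOrder(ast.NodeVisitor):
    def __init__(self) -> None:
        self.order = []  # names in the order they are visited

    def visit_Call(self, node: ast.Call) -> None:
        self.visit(node.func)  # callee first ...
        for arg in node.args:  # ... then the arguments
            self.visit(arg)

    def visit_Name(self, node: ast.Name) -> None:
        self.order.append(node.id)


visitor = CallOrder()
visitor.visit(ast.parse("f(x, y)"))
assert visitor.order == ["f", "x", "y"]
```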
diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index f926d0dfb883..9b432d8e68ec 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -115,6 +115,7 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: tvar_expr.fullname, i, upper_bound=tvar_expr.upper_bound, + tuple_fallback=tvar_expr.tuple_fallback, line=tvar_expr.line, column=tvar_expr.column, ) @@ -128,7 +129,7 @@ def bind_existing(self, tvar_def: TypeVarLikeType) -> None: def get_binding(self, item: str | SymbolTableNode) -> TypeVarLikeType | None: fullname = item.fullname if isinstance(item, SymbolTableNode) else item - assert fullname is not None + assert fullname if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index 3fbef63fd50e..5a5643f35c01 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -15,6 +15,7 @@ from abc import abstractmethod from typing import Any, Callable, Generic, Iterable, Sequence, TypeVar, cast +from typing_extensions import Final from mypy_extensions import mypyc_attr, trait @@ -34,7 +35,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, TupleType, Type, TypeAliasType, @@ -152,11 +152,8 @@ def visit_unpack_type(self, t: UnpackType) -> T: class SyntheticTypeVisitor(TypeVisitor[T]): """A TypeVisitor that also knows how to visit synthetic AST constructs. - Not just real types.""" - - @abstractmethod - def visit_star_type(self, t: StarType) -> T: - pass + Not just real types. + """ @abstractmethod def visit_type_list(self, t: TypeList) -> T: @@ -209,7 +206,7 @@ def visit_instance(self, t: Instance) -> Type: last_known_value: LiteralType | None = None if t.last_known_value is not None: raw_last_known_value = t.last_known_value.accept(self) - assert isinstance(raw_last_known_value, LiteralType) # type: ignore + assert isinstance(raw_last_known_value, LiteralType) # type: ignore[misc] last_known_value = raw_last_known_value return Instance( typ=t.type, @@ -266,7 +263,7 @@ def visit_typeddict_type(self, t: TypedDictType) -> Type: def visit_literal_type(self, t: LiteralType) -> Type: fallback = t.fallback.accept(self) - assert isinstance(fallback, Instance) # type: ignore + assert isinstance(fallback, Instance) # type: ignore[misc] return LiteralType(value=t.value, fallback=fallback, line=t.line, column=t.column) def visit_union_type(self, t: UnionType) -> Type: @@ -284,7 +281,7 @@ def visit_overloaded(self, t: Overloaded) -> Type: items: list[CallableType] = [] for item in t.items: new = item.accept(self) - assert isinstance(new, CallableType) # type: ignore + assert isinstance(new, CallableType) # type: ignore[misc] items.append(new) return Overloaded(items=items) @@ -314,7 +311,7 @@ class TypeQuery(SyntheticTypeVisitor[T]): # TODO: check that we don't have existing violations of this rule. """ - def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: + def __init__(self, strategy: Callable[[list[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. 
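The comment above explains why visited aliases are tracked: without it, a recursive alias such as `A = Union[int, List[A]]` would make a query loop forever. A toy version of that guard, with a self-referencing `Node` standing in for a recursive alias (not mypy's real Type hierarchy):

```python
from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class Node:
    name: str
    children: list[Node] = field(default_factory=list)


def contains_name(root: Node, target: str) -> bool:
    seen: set[int] = set()

    def query(node: Node) -> bool:
        if id(node) in seen:
            return False  # already visited: contribute a neutral result
        seen.add(id(node))
        return node.name == target or any(query(child) for child in node.children)

    return query(root)


alias = Node("A", [Node("int")])
alias.children.append(alias)  # A refers to itself, like A = Union[int, List[A]]
assert contains_name(alias, "int")
assert not contains_name(alias, "str")  # terminates despite the cycle
```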
@@ -385,9 +382,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> T: def visit_literal_type(self, t: LiteralType) -> T: return self.strategy([]) - def visit_star_type(self, t: StarType) -> T: - return t.type.accept(self) - def visit_union_type(self, t: UnionType) -> T: return self.query_types(t.items) @@ -404,24 +398,168 @@ def visit_placeholder_type(self, t: PlaceholderType) -> T: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> T: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if t in self.seen_aliases: + return self.strategy([]) + self.seen_aliases.add(t) if self.skip_alias_target: return self.query_types(t.args) return get_proper_type(t).accept(self) def query_types(self, types: Iterable[Type]) -> T: - """Perform a query for a list of types. + """Perform a query for a list of types using the strategy to combine the results.""" + return self.strategy([t.accept(self) for t in types]) + + +# Return True if at least one type component returns True +ANY_STRATEGY: Final = 0 +# Return True if no type component returns False +ALL_STRATEGY: Final = 1 + - Use the strategy to combine the results. - Skip type aliases already visited types to avoid infinite recursion. +class BoolTypeQuery(SyntheticTypeVisitor[bool]): + """Visitor for performing recursive queries of types with a bool result. + + Use TypeQuery if you need non-bool results. + + 'strategy' is used to combine results for a series of types. It must + be ANY_STRATEGY or ALL_STRATEGY. + + Note: This visitor keeps an internal state (tracks type aliases to avoid + recursion), so it should *never* be re-used for querying different types + unless you call reset() first. + """ + + def __init__(self, strategy: int) -> None: + self.strategy = strategy + if strategy == ANY_STRATEGY: + self.default = False + else: + assert strategy == ALL_STRATEGY + self.default = True + # Keep track of the type aliases already visited. This is needed to avoid + # infinite recursion on types like A = Union[int, List[A]]. An empty set is + # represented as None as a micro-optimization. + self.seen_aliases: set[TypeAliasType] | None = None + # By default, we eagerly expand type aliases, and query also types in the + # alias target. In most cases this is a desired behavior, but we may want + # to skip targets in some cases (e.g. when collecting type variables). + self.skip_alias_target = False + + def reset(self) -> None: + """Clear mutable state (but preserve strategy). + + This *must* be called if you want to reuse the visitor. """ - res: list[T] = [] - for t in types: - if isinstance(t, TypeAliasType): - # Avoid infinite recursion for recursive type aliases. - # TODO: Ideally we should fire subvisitors here (or use caching) if we care - # about duplicates. 
- if t in self.seen_aliases: - continue - self.seen_aliases.add(t) - res.append(t.accept(self)) - return self.strategy(res) + self.seen_aliases = None + + def visit_unbound_type(self, t: UnboundType) -> bool: + return self.query_types(t.args) + + def visit_type_list(self, t: TypeList) -> bool: + return self.query_types(t.items) + + def visit_callable_argument(self, t: CallableArgument) -> bool: + return t.typ.accept(self) + + def visit_any(self, t: AnyType) -> bool: + return self.default + + def visit_uninhabited_type(self, t: UninhabitedType) -> bool: + return self.default + + def visit_none_type(self, t: NoneType) -> bool: + return self.default + + def visit_erased_type(self, t: ErasedType) -> bool: + return self.default + + def visit_deleted_type(self, t: DeletedType) -> bool: + return self.default + + def visit_type_var(self, t: TypeVarType) -> bool: + return self.query_types([t.upper_bound] + t.values) + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return self.default + + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return self.default + + def visit_unpack_type(self, t: UnpackType) -> bool: + return self.query_types([t.type]) + + def visit_parameters(self, t: Parameters) -> bool: + return self.query_types(t.arg_types) + + def visit_partial_type(self, t: PartialType) -> bool: + return self.default + + def visit_instance(self, t: Instance) -> bool: + return self.query_types(t.args) + + def visit_callable_type(self, t: CallableType) -> bool: + # FIX generics + # Avoid allocating any objects here as an optimization. + args = self.query_types(t.arg_types) + ret = t.ret_type.accept(self) + if self.strategy == ANY_STRATEGY: + return args or ret + else: + return args and ret + + def visit_tuple_type(self, t: TupleType) -> bool: + return self.query_types(t.items) + + def visit_typeddict_type(self, t: TypedDictType) -> bool: + return self.query_types(list(t.items.values())) + + def visit_raw_expression_type(self, t: RawExpressionType) -> bool: + return self.default + + def visit_literal_type(self, t: LiteralType) -> bool: + return self.default + + def visit_union_type(self, t: UnionType) -> bool: + return self.query_types(t.items) + + def visit_overloaded(self, t: Overloaded) -> bool: + return self.query_types(t.items) # type: ignore[arg-type] + + def visit_type_type(self, t: TypeType) -> bool: + return t.item.accept(self) + + def visit_ellipsis_type(self, t: EllipsisType) -> bool: + return self.default + + def visit_placeholder_type(self, t: PlaceholderType) -> bool: + return self.query_types(t.args) + + def visit_type_alias_type(self, t: TypeAliasType) -> bool: + # Skip type aliases already visited types to avoid infinite recursion. + # TODO: Ideally we should fire subvisitors here (or use caching) if we care + # about duplicates. + if self.seen_aliases is None: + self.seen_aliases = set() + elif t in self.seen_aliases: + return self.default + self.seen_aliases.add(t) + if self.skip_alias_target: + return self.query_types(t.args) + return get_proper_type(t).accept(self) + + def query_types(self, types: list[Type] | tuple[Type, ...]) -> bool: + """Perform a query for a sequence of types using the strategy to combine the results.""" + # Special-case for lists and tuples to allow mypyc to produce better code. 
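A minimal standalone illustration of the `ANY_STRATEGY`/`ALL_STRATEGY` split documented above: the same recursive query answers either "does any component match" or "does every component match" depending on the strategy it was built with (toy `(name, children)` tuples, not mypy types):

```python
ANY_STRATEGY = 0  # result is True if any component is True
ALL_STRATEGY = 1  # result is True only if every component is True


class HasNameQuery:
    """Toy bool query over nested (name, children) tuples."""

    def __init__(self, strategy: int, target: str) -> None:
        self.strategy = strategy
        self.target = target

    def query(self, node: tuple) -> bool:
        name, children = node
        results = [name == self.target] + [self.query(child) for child in children]
        return any(results) if self.strategy == ANY_STRATEGY else all(results)


tree = ("list", [("int", []), ("str", [])])
assert HasNameQuery(ANY_STRATEGY, "int").query(tree)      # some component is "int"
assert not HasNameQuery(ALL_STRATEGY, "int").query(tree)  # but not every component
```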
+ if isinstance(types, list): + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) + else: + if self.strategy == ANY_STRATEGY: + return any(t.accept(self) for t in types) + else: + return all(t.accept(self) for t in types) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index ae1920e234bb..f3329af6207a 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -4,14 +4,12 @@ import itertools from contextlib import contextmanager -from itertools import chain from typing import Callable, Iterable, Iterator, List, Sequence, Tuple, TypeVar from typing_extensions import Final, Protocol from mypy import errorcodes as codes, message_registry, nodes from mypy.errorcodes import ErrorCode -from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type -from mypy.messages import MessageBuilder, format_type_bare, quote_type_string +from mypy.messages import MessageBuilder, format_type_bare, quote_type_string, wrong_type_arg_count from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -23,7 +21,6 @@ ArgKind, Context, Decorator, - Expression, MypyFile, ParamSpecExpr, PlaceholderNode, @@ -38,17 +35,19 @@ check_arg_names, get_nongen_builtins, ) -from mypy.options import Options +from mypy.options import UNPACK, Options from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface from mypy.semanal_shared import SemanticAnalyzerCoreInterface, paramspec_args, paramspec_kwargs from mypy.tvar_scope import TypeVarLikeScope from mypy.types import ( ANNOTATED_TYPE_NAMES, + ANY_STRATEGY, FINAL_TYPE_NAMES, LITERAL_TYPE_NAMES, NEVER_NAMES, TYPE_ALIAS_NAMES, AnyType, + BoolTypeQuery, CallableArgument, CallableType, DeletedType, @@ -65,7 +64,6 @@ PlaceholderType, RawExpressionType, RequiredType, - StarType, SyntheticTypeVisitor, TrivialSyntheticTypeTranslator, TupleType, @@ -87,8 +85,10 @@ callable_with_ellipsis, flatten_nested_unions, get_proper_type, + has_type_vars, ) from mypy.typetraverser import TypeTraverserVisitor +from mypy.typevars import fill_typevars T = TypeVar("T") @@ -117,9 +117,11 @@ "asyncio.futures.Future", } +SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"} + def analyze_type_alias( - node: Expression, + type: Type, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarLikeScope, plugin: Plugin, @@ -128,18 +130,14 @@ def analyze_type_alias( allow_placeholder: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, -) -> tuple[Type, set[str]] | None: + allowed_alias_tvars: list[TypeVarLikeType] | None = None, +) -> tuple[Type, set[str]]: """Analyze r.h.s. of a (potential) type alias definition. If `node` is valid as a type alias rvalue, return the resulting type and a set of full names of type aliases it depends on (directly or indirectly). - Return None otherwise. 'node' must have been semantically analyzed. + 'node' must have been semantically analyzed. 
""" - try: - type = expr_to_unanalyzed_type(node, options, api.is_stub_file) - except TypeTranslationError: - api.fail("Invalid type alias: expression is not a valid type", node) - return None analyzer = TypeAnalyser( api, tvar_scope, @@ -148,6 +146,8 @@ def analyze_type_alias( is_typeshed_stub, defining_alias=True, allow_placeholder=allow_placeholder, + prohibit_self_type="type alias target", + allowed_alias_tvars=allowed_alias_tvars, ) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope @@ -196,10 +196,11 @@ def __init__( allow_required: bool = False, allow_param_spec_literals: bool = False, report_invalid_types: bool = True, + prohibit_self_type: str | None = None, + allowed_alias_tvars: list[TypeVarLikeType] | None = None, + allow_type_any: bool = False, ) -> None: self.api = api - self.lookup_qualified = api.lookup_qualified - self.lookup_fqn_func = api.lookup_fully_qualified self.fail_func = api.fail self.note_func = api.note self.tvar_scope = tvar_scope @@ -213,8 +214,12 @@ def __init__( self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set( "annotations" ) - # Should we accept unbound type variables (always OK in aliases)? - self.allow_unbound_tvars = allow_unbound_tvars or defining_alias + # Should we accept unbound type variables? This is currently used for class bases, + # and alias right hand sides (before they are analyzed as type aliases). + self.allow_unbound_tvars = allow_unbound_tvars + if allowed_alias_tvars is None: + allowed_alias_tvars = [] + self.allowed_alias_tvars = allowed_alias_tvars # If false, record incomplete ref if we generate PlaceholderType. self.allow_placeholder = allow_placeholder # Are we in a context where Required[] is allowed? @@ -231,6 +236,17 @@ def __init__( self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used: set[str] = set() + self.prohibit_self_type = prohibit_self_type + # Allow variables typed as Type[Any] and type (useful for base classes). + self.allow_type_any = allow_type_any + + def lookup_qualified( + self, name: str, ctx: Context, suppress_errors: bool = False + ) -> SymbolTableNode | None: + return self.api.lookup_qualified(name, ctx, suppress_errors) + + def lookup_fully_qualified(self, name: str) -> SymbolTableNode: + return self.api.lookup_fully_qualified(name) def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) @@ -254,7 +270,15 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.api.defer() else: self.api.record_incomplete_ref() - return PlaceholderType(node.fullname, self.anal_array(t.args), t.line) + # Always allow ParamSpec for placeholders, if they are actually not valid, + # they will be reported later, after we resolve placeholders. 
+ return PlaceholderType( + node.fullname, + self.anal_array( + t.args, allow_param_spec=True, allow_param_spec_literals=True + ), + t.line, + ) else: if self.api.final_iteration: self.cannot_resolve_type(t) @@ -281,11 +305,15 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) tvar_def = self.tvar_scope.get_binding(sym) if isinstance(sym.node, ParamSpecExpr): if tvar_def is None: - self.fail(f'ParamSpec "{t.name}" is unbound', t) + if self.allow_unbound_tvars: + return t + self.fail(f'ParamSpec "{t.name}" is unbound', t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, ParamSpecType) if len(t.args) > 0: - self.fail(f'ParamSpec "{t.name}" used with arguments', t) + self.fail( + f'ParamSpec "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) # Change the line number return ParamSpecType( tvar_def.name, @@ -296,17 +324,24 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) line=t.line, column=t.column, ) - if isinstance(sym.node, TypeVarExpr) and tvar_def is not None and self.defining_alias: + if ( + isinstance(sym.node, TypeVarExpr) + and self.defining_alias + and not defining_literal + and (tvar_def is None or tvar_def not in self.allowed_alias_tvars) + ): self.fail( - 'Can\'t use bound type variable "{}"' - " to define generic alias".format(t.name), + f'Can\'t use bound type variable "{t.name}" to define generic alias', t, + code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if isinstance(sym.node, TypeVarExpr) and tvar_def is not None: assert isinstance(tvar_def, TypeVarType) if len(t.args) > 0: - self.fail(f'Type variable "{t.name}" used with arguments', t) + self.fail( + f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) # Change the line number return TypeVarType( tvar_def.name, @@ -319,27 +354,33 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) column=t.column, ) if isinstance(sym.node, TypeVarTupleExpr) and ( - tvar_def is not None and self.defining_alias + tvar_def is not None + and self.defining_alias + and tvar_def not in self.allowed_alias_tvars ): self.fail( - 'Can\'t use bound type variable "{}"' - " to define generic alias".format(t.name), + f'Can\'t use bound type variable "{t.name}" to define generic alias', t, + code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if isinstance(sym.node, TypeVarTupleExpr): if tvar_def is None: - self.fail(f'TypeVarTuple "{t.name}" is unbound', t) + self.fail(f'TypeVarTuple "{t.name}" is unbound', t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, TypeVarTupleType) if len(t.args) > 0: - self.fail(f'Type variable "{t.name}" used with arguments', t) + self.fail( + f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE + ) + # Change the line number return TypeVarTupleType( tvar_def.name, tvar_def.fullname, tvar_def.id, tvar_def.upper_bound, + sym.node.tuple_fallback, line=t.line, column=t.column, ) @@ -348,7 +389,14 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) return special if isinstance(node, TypeAlias): self.aliases_used.add(fullname) - an_args = self.anal_array(t.args) + an_args = self.anal_array( + t.args, + allow_param_spec=True, + allow_param_spec_literals=node.has_param_spec_type, + ) + if node.has_param_spec_type and len(node.alias_tvars) == 1: + an_args = self.pack_paramspec_args(an_args) + disallow_any = self.options.disallow_any_generics and 
not self.is_typeshed_stub res = expand_type_alias( node, @@ -391,29 +439,48 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) else: # sym is None return AnyType(TypeOfAny.special_form) + def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: + # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]]. + # These do not support mypy_extensions VarArgs, etc. as they were already analyzed + # TODO: should these be re-analyzed to get rid of this inconsistency? + count = len(an_args) + if count > 0: + first_arg = get_proper_type(an_args[0]) + if not (count == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType))): + return [Parameters(an_args, [ARG_POS] * count, [None] * count)] + return list(an_args) + def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. We'd first # need access to MessageBuilder here. Also move the similar # message generation logic in semanal.py. self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t) - if self.options.enable_recursive_aliases and self.api.is_func_scope(): + if not self.options.disable_recursive_aliases and self.api.is_func_scope(): self.note("Recursive types are not allowed at function scope", t) def apply_concatenate_operator(self, t: UnboundType) -> Type: if len(t.args) == 0: - self.api.fail("Concatenate needs type arguments", t) + self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) # last argument has to be ParamSpec ps = self.anal_type(t.args[-1], allow_param_spec=True) if not isinstance(ps, ParamSpecType): - self.api.fail("The last parameter to Concatenate needs to be a ParamSpec", t) + if isinstance(ps, UnboundType) and self.allow_unbound_tvars: + sym = self.lookup_qualified(ps.name, t) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + return ps + self.api.fail( + "The last parameter to Concatenate needs to be a ParamSpec", + t, + code=codes.VALID_TYPE, + ) return AnyType(TypeOfAny.from_error) # TODO: this may not work well with aliases, if those worked. # Those should be special-cased. elif ps.prefix.arg_types: - self.api.fail("Nested Concatenates are invalid", t) + self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE) args = self.anal_array(t.args[:-1]) pre = ps.prefix @@ -434,10 +501,12 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ if fullname == "builtins.None": return NoneType() elif fullname == "typing.Any" or fullname == "builtins.Any": - return AnyType(TypeOfAny.explicit) + return AnyType(TypeOfAny.explicit, line=t.line, column=t.column) elif fullname in FINAL_TYPE_NAMES: self.fail( - "Final can be only used as an outermost qualifier in a variable annotation", t + "Final can be only used as an outermost qualifier in a variable annotation", + t, + code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) elif fullname == "typing.Tuple" or ( @@ -468,7 +537,9 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ return UnionType.make_union(items) elif fullname == "typing.Optional": if len(t.args) != 1: - self.fail("Optional[...] must have exactly one type argument", t) + self.fail( + "Optional[...] 
must have exactly one type argument", t, code=codes.VALID_TYPE + ) return AnyType(TypeOfAny.from_error) item = self.anal_type(t.args[0]) return make_optional_type(item) @@ -488,19 +559,25 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ return None if len(t.args) != 1: type_str = "Type[...]" if fullname == "typing.Type" else "type[...]" - self.fail(type_str + " must have exactly one type argument", t) + self.fail( + type_str + " must have exactly one type argument", t, code=codes.VALID_TYPE + ) item = self.anal_type(t.args[0]) if bad_type_type_item(item): - self.fail("Type[...] can't contain another Type[...]", t) + self.fail("Type[...] can't contain another Type[...]", t, code=codes.VALID_TYPE) item = AnyType(TypeOfAny.from_error) return TypeType.make_normalized(item, line=t.line, column=t.column) elif fullname == "typing.ClassVar": if self.nesting_level > 0: - self.fail("Invalid type: ClassVar nested inside other type", t) + self.fail( + "Invalid type: ClassVar nested inside other type", t, code=codes.VALID_TYPE + ) if len(t.args) == 0: return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) if len(t.args) != 1: - self.fail("ClassVar[...] must have at most one type argument", t) + self.fail( + "ClassVar[...] must have at most one type argument", t, code=codes.VALID_TYPE + ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) elif fullname in NEVER_NAMES: @@ -513,34 +590,66 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ "Annotated[...] must have exactly one type argument" " and at least one annotation", t, + code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) elif fullname in ("typing_extensions.Required", "typing.Required"): if not self.allow_required: - self.fail("Required[] can be only used in a TypedDict definition", t) + self.fail( + "Required[] can be only used in a TypedDict definition", + t, + code=codes.VALID_TYPE, + ) return AnyType(TypeOfAny.from_error) if len(t.args) != 1: - self.fail("Required[] must have exactly one type argument", t) + self.fail( + "Required[] must have exactly one type argument", t, code=codes.VALID_TYPE + ) return AnyType(TypeOfAny.from_error) return RequiredType(self.anal_type(t.args[0]), required=True) elif fullname in ("typing_extensions.NotRequired", "typing.NotRequired"): if not self.allow_required: - self.fail("NotRequired[] can be only used in a TypedDict definition", t) + self.fail( + "NotRequired[] can be only used in a TypedDict definition", + t, + code=codes.VALID_TYPE, + ) return AnyType(TypeOfAny.from_error) if len(t.args) != 1: - self.fail("NotRequired[] must have exactly one type argument", t) + self.fail( + "NotRequired[] must have exactly one type argument", t, code=codes.VALID_TYPE + ) return AnyType(TypeOfAny.from_error) return RequiredType(self.anal_type(t.args[0]), required=False) elif self.anal_type_guard_arg(t, fullname) is not None: # In most contexts, TypeGuard[...] acts as an alias for bool (ignoring its args) return self.named_type("builtins.bool") elif fullname in ("typing.Unpack", "typing_extensions.Unpack"): - # We don't want people to try to use this yet. - if not self.options.enable_incomplete_features: - self.fail('"Unpack" is not supported by mypy yet', t) + if not self.api.incomplete_feature_enabled(UNPACK, t): + return AnyType(TypeOfAny.from_error) + if len(t.args) != 1: + self.fail("Unpack[...] 
requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) return UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) + elif fullname in SELF_TYPE_NAMES: + if t.args: + self.fail("Self type cannot have type arguments", t) + if self.prohibit_self_type is not None: + self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t) + return AnyType(TypeOfAny.from_error) + if self.api.type is None: + self.fail("Self type is only allowed in annotations within class definition", t) + return AnyType(TypeOfAny.from_error) + if self.api.type.has_base("builtins.type"): + self.fail("Self type cannot be used in a metaclass", t) + if self.api.type.self_type is not None: + if self.api.type.is_final: + return fill_typevars(self.api.type) + return self.api.type.self_type.copy_modified(line=t.line, column=t.column) + # TODO: verify this is unreachable and replace with an assert? + self.fail("Unexpected Self type", t) + return AnyType(TypeOfAny.from_error) return None def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: @@ -561,36 +670,24 @@ def analyze_type_with_type_info( fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) return TupleType(self.anal_array(args), fallback, ctx.line) - # This is a heuristic: it will be checked later anyways but the error - # message may be worse. - with self.set_allow_param_spec_literals(info.has_param_spec_type): - # Analyze arguments and (usually) construct Instance type. The - # number of type arguments and their values are - # checked only later, since we do not always know the - # valid count at this point. Thus we may construct an - # Instance with an invalid number of type arguments. - instance = Instance( - info, self.anal_array(args, allow_param_spec=True), ctx.line, ctx.column - ) - - # "aesthetic" paramspec literals - # these do not support mypy_extensions VarArgs, etc. as they were already analyzed - # TODO: should these be re-analyzed to get rid of this inconsistency? - # another inconsistency is with empty type args (Z[] is more possibly an error imo) - if len(info.type_vars) == 1 and info.has_param_spec_type and len(instance.args) > 0: - first_arg = get_proper_type(instance.args[0]) - - # TODO: can I use tuple syntax to isinstance multiple in 3.6? - if not ( - len(instance.args) == 1 - and ( - isinstance(first_arg, Parameters) - or isinstance(first_arg, ParamSpecType) - or isinstance(first_arg, AnyType) - ) - ): - args = instance.args - instance.args = (Parameters(args, [ARG_POS] * len(args), [None] * len(args)),) + # Analyze arguments and (usually) construct Instance type. The + # number of type arguments and their values are + # checked only later, since we do not always know the + # valid count at this point. Thus we may construct an + # Instance with an invalid number of type arguments. + # + # We allow ParamSpec literals based on a heuristic: it will be + # checked later anyways but the error message may be worse. + instance = Instance( + info, + self.anal_array( + args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type + ), + ctx.line, + ctx.column, + ) + if len(info.type_vars) == 1 and info.has_param_spec_type: + instance.args = tuple(self.pack_paramspec_args(instance.args)) if info.has_type_var_tuple_type: # - 1 to allow for the empty type var tuple case. @@ -613,21 +710,41 @@ def analyze_type_with_type_info( # The class has a Tuple[...] base class so it will be # represented as a tuple type. 
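The Unpack handling added in this hunk is the user-facing PEP 646 surface; a sketch of what it enables once the TypeVarTuple/Unpack incomplete features are switched on (illustrative only, assumes `typing_extensions` is available):

```python
from __future__ import annotations

from typing import Generic
from typing_extensions import TypeVarTuple, Unpack

Shape = TypeVarTuple("Shape")


class Array(Generic[Unpack[Shape]]):
    def __init__(self, *shape: Unpack[Shape]) -> None:
        self.shape = shape


# Unpack[...] always takes exactly one type argument (the new check above);
# Array[int, int] then describes a two-dimensional array.
a: Array[int, int] = Array(1, 2)
assert a.shape == (1, 2)
```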
if info.special_alias: - return TypeAliasType(info.special_alias, self.anal_array(args)) + return expand_type_alias( + info.special_alias, + # TODO: should we allow NamedTuples generic in ParamSpec? + self.anal_array(args), + self.fail, + False, + ctx, + use_standard_error=True, + ) return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance) td = info.typeddict_type if td is not None: # The class has a TypedDict[...] base class so it will be # represented as a typeddict type. if info.special_alias: - return TypeAliasType(info.special_alias, self.anal_array(args)) + return expand_type_alias( + info.special_alias, + # TODO: should we allow TypedDicts generic in ParamSpec? + self.anal_array(args), + self.fail, + False, + ctx, + use_standard_error=True, + ) # Create a named TypedDictType return td.copy_modified( item_types=self.anal_array(list(td.items.values())), fallback=instance ) if info.fullname == "types.NoneType": - self.fail("NoneType should not be used as a type, please use None instead", ctx) + self.fail( + "NoneType should not be used as a type, please use None instead", + ctx, + code=codes.VALID_TYPE, + ) return NoneType(ctx.line, ctx.column) return instance @@ -654,6 +771,11 @@ def analyze_unbound_type_without_type_info( return AnyType( TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name ) + elif self.allow_type_any: + if isinstance(typ, Instance) and typ.type.fullname == "builtins.type": + return AnyType(TypeOfAny.special_form) + if isinstance(typ, TypeType) and isinstance(typ.item, AnyType): + return AnyType(TypeOfAny.from_another_any, source_any=typ.item) # Option 2: # Unbound type variable. Currently these may be still valid, # for example when defining a generic type alias. @@ -680,7 +802,7 @@ def analyze_unbound_type_without_type_info( msg = message_registry.INVALID_TYPE_RAW_ENUM_VALUE.format( base_enum_short_name, value ) - self.fail(msg, t) + self.fail(msg.value, t, code=msg.code) return AnyType(TypeOfAny.from_error) return LiteralType( value=value, @@ -709,8 +831,8 @@ def analyze_unbound_type_without_type_info( else: notes.append('Perhaps you need "Callable[...]" or a callback protocol?') elif isinstance(sym.node, MypyFile): - # TODO: suggest a protocol when supported. message = 'Module "{}" is not valid as a type' + notes.append("Perhaps you meant to use a protocol matching the module structure?") elif unbound_tvar: message = 'Type variable "{}" is unbound' short = name.split(".")[-1] @@ -726,9 +848,11 @@ def analyze_unbound_type_without_type_info( ) else: message = 'Cannot interpret reference "{}" as a type' - self.fail(message.format(name), t, code=codes.VALID_TYPE) - for note in notes: - self.note(note, t, code=codes.VALID_TYPE) + if not defining_literal: + # Literal check already gives a custom error. Avoid duplicating errors. + self.fail(message.format(name), t, code=codes.VALID_TYPE) + for note in notes: + self.note(note, t, code=codes.VALID_TYPE) # TODO: Would it be better to always return Any instead of UnboundType # in case of an error? 
On one hand, UnboundType has a name so error messages @@ -763,12 +887,14 @@ def visit_type_list(self, t: TypeList) -> Type: else: return AnyType(TypeOfAny.from_error) else: - self.fail('Bracketed expression "[...]" is not valid as a type', t) + self.fail( + 'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE + ) self.note('Did you mean "List[...]"?', t) return AnyType(TypeOfAny.from_error) def visit_callable_argument(self, t: CallableArgument) -> Type: - self.fail("Invalid type", t) + self.fail("Invalid type", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) def visit_instance(self, t: Instance) -> Type: @@ -799,7 +925,7 @@ def visit_callable_type(self, t: CallableType, nested: bool = True) -> Type: if self.defining_alias: variables = t.variables else: - variables = self.bind_function_type_variables(t, t) + variables, _ = self.bind_function_type_variables(t, t) special = self.anal_type_guard(t.ret_type) arg_kinds = t.arg_kinds if len(arg_kinds) >= 2 and arg_kinds[-2] == ARG_STAR and arg_kinds[-1] == ARG_STAR2: @@ -831,24 +957,38 @@ def anal_type_guard(self, t: Type) -> Type | None: def anal_type_guard_arg(self, t: UnboundType, fullname: str) -> Type | None: if fullname in ("typing_extensions.TypeGuard", "typing.TypeGuard"): if len(t.args) != 1: - self.fail("TypeGuard must have exactly one type argument", t) + self.fail( + "TypeGuard must have exactly one type argument", t, code=codes.VALID_TYPE + ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) return None def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> Type: """Analyze signature argument type for *args and **kwargs argument.""" - # TODO: Check that suffix and kind match if isinstance(t, UnboundType) and t.name and "." 
in t.name and not t.args: components = t.name.split(".") - sym = self.lookup_qualified(".".join(components[:-1]), t) + tvar_name = ".".join(components[:-1]) + sym = self.lookup_qualified(tvar_name, t) if sym is not None and isinstance(sym.node, ParamSpecExpr): tvar_def = self.tvar_scope.get_binding(sym) if isinstance(tvar_def, ParamSpecType): if kind == ARG_STAR: make_paramspec = paramspec_args + if components[-1] != "args": + self.fail( + f'Use "{tvar_name}.args" for variadic "*" parameter', + t, + code=codes.VALID_TYPE, + ) elif kind == ARG_STAR2: make_paramspec = paramspec_kwargs + if components[-1] != "kwargs": + self.fail( + f'Use "{tvar_name}.kwargs" for variadic "**" parameter', + t, + code=codes.VALID_TYPE, + ) else: assert False, kind return make_paramspec( @@ -890,17 +1030,7 @@ def visit_tuple_type(self, t: TupleType) -> Type: code=codes.SYNTAX, ) return AnyType(TypeOfAny.from_error) - star_count = sum(1 for item in t.items if isinstance(item, StarType)) - if star_count > 1: - self.fail("At most one star type allowed in a tuple", t) - if t.implicit: - return TupleType( - [AnyType(TypeOfAny.from_error) for _ in t.items], - self.named_type("builtins.tuple"), - t.line, - ) - else: - return AnyType(TypeOfAny.from_error) + any_type = AnyType(TypeOfAny.special_form) # If the fallback isn't filled in yet, its type will be the falsey FakeInfo fallback = ( @@ -952,9 +1082,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> Type: def visit_literal_type(self, t: LiteralType) -> Type: return t - def visit_star_type(self, t: StarType) -> Type: - return StarType(self.anal_type(t.type), t.line) - def visit_union_type(self, t: UnionType) -> Type: if ( t.uses_pep604_syntax is True @@ -962,7 +1089,7 @@ def visit_union_type(self, t: UnionType) -> Type: and not self.always_allow_new_syntax and not self.options.python_version >= (3, 10) ): - self.fail("X | Y syntax for unions requires Python 3.10", t) + self.fail("X | Y syntax for unions requires Python 3.10", t, code=codes.SYNTAX) return UnionType(self.anal_array(t.items), t.line) def visit_partial_type(self, t: PartialType) -> Type: @@ -982,7 +1109,7 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: - n = None if t.fullname is None else self.api.lookup_fully_qualified(t.fullname) + n = None if not t.fullname else self.api.lookup_fully_qualified(t.fullname) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t @@ -1002,6 +1129,16 @@ def analyze_callable_args_for_paramspec( return None tvar_def = self.tvar_scope.get_binding(sym) if not isinstance(tvar_def, ParamSpecType): + if ( + tvar_def is None + and self.allow_unbound_tvars + and isinstance(sym.node, ParamSpecExpr) + ): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. 
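The `.args`/`.kwargs` suffix check added above enforces the standard PEP 612 spelling for ParamSpec components in `*`/`**` parameters; a small decorator in that style (the `logged` helper is illustrative):

```python
from __future__ import annotations

from typing import Callable, TypeVar
from typing_extensions import ParamSpec

P = ParamSpec("P")
R = TypeVar("R")


def logged(func: Callable[P, R]) -> Callable[P, R]:
    # The variadic parameters must be spelled P.args / P.kwargs, as checked above.
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)

    return wrapper


@logged
def add(x: int, y: int) -> int:
    return x + y


assert add(1, 2) == 3
```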
+ return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None return CallableType( @@ -1037,6 +1174,14 @@ def analyze_callable_args_for_concatenate( tvar_def = self.anal_type(callable_args, allow_param_spec=True) if not isinstance(tvar_def, ParamSpecType): + if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): + sym = self.lookup_qualified(tvar_def.name, callable_args) + if sym is not None and isinstance(sym.node, ParamSpecExpr): + # We are analyzing this type in runtime context (e.g. as type application). + # If it is not valid as a type in this position an error will be given later. + return callable_with_ellipsis( + AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback + ) return None # ick, CallableType should take ParamSpecType @@ -1092,6 +1237,7 @@ def analyze_callable_type(self, t: UnboundType) -> Type: "The first argument to Callable must be a " 'list of types, parameter specification, or "..."', t, + code=codes.VALID_TYPE, ) self.note( "See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas", # noqa: E501 @@ -1114,6 +1260,7 @@ def analyze_callable_args( args: list[Type] = [] kinds: list[ArgKind] = [] names: list[str | None] = [] + found_unpack = False for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) @@ -1134,6 +1281,19 @@ def analyze_callable_args( if arg.name is not None and kind.is_star(): self.fail(f"{arg.constructor} arguments should not have names", arg) return None + elif isinstance(arg, UnboundType): + kind = ARG_POS + # Potentially a unpack. + sym = self.lookup_qualified(arg.name, arg) + if sym is not None: + if sym.fullname == "typing_extensions.Unpack": + if found_unpack: + self.fail("Callables can only have a single unpack", arg) + found_unpack = True + kind = ARG_STAR + args.append(arg) + kinds.append(kind) + names.append(None) else: args.append(arg) kinds.append(ARG_POS) @@ -1145,7 +1305,7 @@ def analyze_callable_args( def analyze_literal_type(self, t: UnboundType) -> Type: if len(t.args) == 0: - self.fail("Literal[...] must have at least one parameter", t) + self.fail("Literal[...] must have at least one parameter", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) output: list[Type] = [] @@ -1196,7 +1356,11 @@ def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] # TODO: Once we start adding support for enums, make sure we report a custom # error for case 2 as well. if arg.type_of_any not in (TypeOfAny.from_error, TypeOfAny.special_form): - self.fail(f'Parameter {idx} of Literal[...] cannot be of type "Any"', ctx) + self.fail( + f'Parameter {idx} of Literal[...] cannot be of type "Any"', + ctx, + code=codes.VALID_TYPE, + ) return None elif isinstance(arg, RawExpressionType): # A raw literal. Convert it directly into a literal if we can. @@ -1206,7 +1370,7 @@ def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] msg = f'Parameter {idx} of Literal[...] cannot be of type "{name}"' else: msg = "Invalid type: Literal[...] cannot contain arbitrary expressions" - self.fail(msg, ctx) + self.fail(msg, ctx, code=codes.VALID_TYPE) # Note: we deliberately ignore arg.note here: the extra info might normally be # helpful, but it generally won't make sense in the context of a Literal[...]. 
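# --- Editor's illustration (not part of the patch) ---------------------------
# The Literal[...] diagnostics touched above now carry code=codes.VALID_TYPE,
# which (assuming the usual "valid-type" error-code string) means they can be
# silenced per line like any other coded error. Roughly:
from typing_extensions import Literal

x: Literal[3 + 4]  # error: Invalid type: Literal[...] cannot contain arbitrary expressions
y: Literal[3 + 4]  # type: ignore[valid-type]
# -----------------------------------------------------------------------------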
return None @@ -1230,7 +1394,7 @@ def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] out.extend(union_result) return out else: - self.fail(f"Parameter {idx} of Literal[...] is invalid", ctx) + self.fail(f"Parameter {idx} of Literal[...] is invalid", ctx, code=codes.VALID_TYPE) return None def analyze_type(self, t: Type) -> Type: @@ -1249,14 +1413,17 @@ def tvar_scope_frame(self) -> Iterator[None]: yield self.tvar_scope = old_scope + def find_type_var_likes(self, t: Type, include_callables: bool = True) -> TypeVarLikeList: + return t.accept( + TypeVarLikeQuery(self.api, self.tvar_scope, include_callables=include_callables) + ) + def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLikeExpr]]: """Return list of unique type variables referred to in a callable.""" names: list[str] = [] tvars: list[TypeVarLikeExpr] = [] for arg in type.arg_types: - for name, tvar_expr in arg.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope) - ): + for name, tvar_expr in self.find_type_var_likes(arg): if name not in names: names.append(name) tvars.append(tvar_expr) @@ -1264,41 +1431,53 @@ def infer_type_variables(self, type: CallableType) -> list[tuple[str, TypeVarLik # look inside Callable types. Type variables only appearing in # functions in the return type belong to those functions, not the # function we're currently analyzing. - for name, tvar_expr in type.ret_type.accept( - TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope, include_callables=False) - ): + for name, tvar_expr in self.find_type_var_likes(type.ret_type, include_callables=False): if name not in names: names.append(name) tvars.append(tvar_expr) + + if not names: + return [] # Fast path return list(zip(names, tvars)) def bind_function_type_variables( self, fun_type: CallableType, defn: Context - ) -> Sequence[TypeVarLikeType]: + ) -> tuple[Sequence[TypeVarLikeType], bool]: """Find the type variables of the function type and bind them in our tvar_scope""" + has_self_type = False if fun_type.variables: + defs = [] for var in fun_type.variables: + if self.api.type and self.api.type.self_type and var == self.api.type.self_type: + has_self_type = True + continue var_node = self.lookup_qualified(var.name, defn) assert var_node, "Binding for function type variable not found within function" var_expr = var_node.node assert isinstance(var_expr, TypeVarLikeExpr) - self.tvar_scope.bind_new(var.name, var_expr) - return fun_type.variables + binding = self.tvar_scope.bind_new(var.name, var_expr) + defs.append(binding) + return defs, has_self_type typevars = self.infer_type_variables(fun_type) + has_self_type = find_self_type( + fun_type, lambda name: self.api.lookup_qualified(name, defn, suppress_errors=True) + ) # Do not define a new type variable if already defined in scope. 
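# --- Editor's illustration (not part of the patch) ---------------------------
# What the new has_self_type result corresponds to at the user level, assuming
# SELF_TYPE_NAMES (imported elsewhere in the patch) covers typing.Self and
# typing_extensions.Self:
from typing_extensions import Self

class Builder:
    def add(self, value: int) -> Self:  # the signature mentions Self, so
        return self                     # bind_function_type_variables() now
                                        # also reports has_self_type=True
# -----------------------------------------------------------------------------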
typevars = [ (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn) ] - defs: list[TypeVarLikeType] = [] + defs = [] for name, tvar in typevars: if not self.tvar_scope.allow_binding(tvar.fullname): - self.fail(f'Type variable "{name}" is bound by an outer class', defn) - self.tvar_scope.bind_new(name, tvar) - binding = self.tvar_scope.get_binding(tvar.fullname) - assert binding is not None + self.fail( + f'Type variable "{name}" is bound by an outer class', + defn, + code=codes.VALID_TYPE, + ) + binding = self.tvar_scope.bind_new(name, tvar) defs.append(binding) - return defs + return defs, has_self_type def is_defined_type_var(self, tvar: str, context: Context) -> bool: tvar_node = self.lookup_qualified(tvar, context) @@ -1307,12 +1486,20 @@ def is_defined_type_var(self, tvar: str, context: Context) -> bool: return self.tvar_scope.get_binding(tvar_node) is not None def anal_array( - self, a: Iterable[Type], nested: bool = True, *, allow_param_spec: bool = False + self, + a: Iterable[Type], + nested: bool = True, + *, + allow_param_spec: bool = False, + allow_param_spec_literals: bool = False, ) -> list[Type]: + old_allow_param_spec_literals = self.allow_param_spec_literals + self.allow_param_spec_literals = allow_param_spec_literals res: list[Type] = [] for t in a: res.append(self.anal_type(t, nested, allow_param_spec=allow_param_spec)) - return res + self.allow_param_spec_literals = old_allow_param_spec_literals + return self.check_unpacks_in_list(res) def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = False) -> Type: if nested: @@ -1331,10 +1518,12 @@ def anal_type(self, t: Type, nested: bool = True, *, allow_param_spec: bool = Fa and analyzed.flavor == ParamSpecFlavor.BARE ): if analyzed.prefix.arg_types: - self.fail("Invalid location for Concatenate", t) + self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE) self.note("You can use Concatenate as the first argument to Callable", t) else: - self.fail(f'Invalid location for ParamSpec "{analyzed.name}"', t) + self.fail( + f'Invalid location for ParamSpec "{analyzed.name}"', t, code=codes.VALID_TYPE + ) self.note( "You can use ParamSpec as the first argument to Callable, e.g., " "'Callable[{}, int]'".format(analyzed.name), @@ -1366,26 +1555,37 @@ def named_type( line: int = -1, column: int = -1, ) -> Instance: - node = self.lookup_fqn_func(fully_qualified_name) + node = self.lookup_fully_qualified(fully_qualified_name) assert isinstance(node.node, TypeInfo) any_type = AnyType(TypeOfAny.special_form) + if args is not None: + args = self.check_unpacks_in_list(args) return Instance( node.node, args or [any_type] * len(node.node.defn.type_vars), line=line, column=column ) + def check_unpacks_in_list(self, items: list[Type]) -> list[Type]: + new_items: list[Type] = [] + num_unpacks = 0 + final_unpack = None + for item in items: + if isinstance(item, UnpackType): + if not num_unpacks: + new_items.append(item) + num_unpacks += 1 + final_unpack = item + else: + new_items.append(item) + + if num_unpacks > 1: + assert final_unpack is not None + self.fail("More than one Unpack in a type is not allowed", final_unpack) + return new_items + def tuple_type(self, items: list[Type]) -> TupleType: any_type = AnyType(TypeOfAny.special_form) return TupleType(items, fallback=self.named_type("builtins.tuple", [any_type])) - @contextmanager - def set_allow_param_spec_literals(self, to: bool) -> Iterator[None]: - old = self.allow_param_spec_literals - try: - self.allow_param_spec_literals 
= to - yield - finally: - self.allow_param_spec_literals = old - TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]] @@ -1473,16 +1673,11 @@ def fix_instance( t.args = (any_type,) * len(t.type.type_vars) return # Invalid number of type parameters. - n = len(t.type.type_vars) - s = f"{n} type arguments" - if n == 0: - s = "no type arguments" - elif n == 1: - s = "1 type argument" - act = str(len(t.args)) - if act == "0": - act = "none" - fail(f'"{t.type.name}" expects {s}, but {act} given', t, code=codes.TYPE_ARG) + fail( + wrong_type_arg_count(len(t.type.type_vars), str(len(t.args)), t.type.name), + t, + code=codes.TYPE_ARG, + ) # Construct the correct number of type arguments, as # otherwise the type checker may crash as it expects # things to be right. @@ -1499,16 +1694,17 @@ def expand_type_alias( *, unexpanded_type: Type | None = None, disallow_any: bool = False, + use_standard_error: bool = False, ) -> Type: """Expand a (generic) type alias target following the rules outlined in TypeAlias docstring. Here: - target: original target type (contains unbound type variables) - alias_tvars: type variable names + target: original target type args: types to be substituted in place of type variables fail: error reporter callback no_args: whether original definition used a bare generic `A = List` ctx: context where expansion happens + unexpanded_type, disallow_any, use_standard_error: used to customize error messages """ exp_len = len(node.alias_tvars) act_len = len(args) @@ -1540,13 +1736,22 @@ def expand_type_alias( tp.column = ctx.column return tp if act_len != exp_len: - fail(f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}", ctx) + if use_standard_error: + # This is used if type alias is an internal representation of another type, + # for example a generic TypedDict or NamedTuple. + msg = wrong_type_arg_count(exp_len, str(act_len), node.name) + else: + msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}" + fail(msg, ctx, code=codes.TYPE_ARG) return set_any_tvars(node, ctx.line, ctx.column, from_error=True) + # TODO: we need to check args validity w.r.t alias.alias_tvars. + # Otherwise invalid instantiations will be allowed in runtime context. + # Note: in type context, these will be still caught by semanal_typeargs. typ = TypeAliasType(node, args, ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. 
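# --- Editor's sketch (not the real helper) -----------------------------------
# wrong_type_arg_count, used above in fix_instance() and expand_type_alias(),
# appears to centralize the message that the deleted inline code used to
# build. A minimal reconstruction from that deleted code, under that
# assumption:
def _wrong_type_arg_count_sketch(expected: int, actual: str, name: str) -> str:
    s = f"{expected} type arguments"
    if expected == 0:
        s = "no type arguments"
    elif expected == 1:
        s = "1 type argument"
    if actual == "0":
        actual = "none"
    return f'"{name}" expects {s}, but {actual} given'

# e.g. _wrong_type_arg_count_sketch(1, "2", "Foo")
#   -> '"Foo" expects 1 type argument, but 2 given'
# -----------------------------------------------------------------------------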
if ( - isinstance(typ.alias.target, Instance) # type: ignore + isinstance(typ.alias.target, Instance) # type: ignore[misc] and typ.alias.target.type.fullname == "mypy_extensions.FlexibleAlias" ): exp = get_proper_type(typ) @@ -1589,7 +1794,9 @@ def set_any_tvars( return TypeAliasType(node, [any_type] * len(node.alias_tvars), newline, newcolumn) -def remove_dups(tvars: Iterable[T]) -> list[T]: +def remove_dups(tvars: list[T]) -> list[T]: + if len(tvars) <= 1: + return tvars # Get unique elements in order of appearance all_tvars: set[T] = set() new_tvars: list[T] = [] @@ -1600,8 +1807,13 @@ def remove_dups(tvars: Iterable[T]) -> list[T]: return new_tvars -def flatten_tvars(ll: Iterable[list[T]]) -> list[T]: - return remove_dups(chain.from_iterable(ll)) +def flatten_tvars(lists: list[list[T]]) -> list[T]: + result: list[T] = [] + for lst in lists: + for item in lst: + if item not in result: + result.append(item) + return result class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): @@ -1609,17 +1821,15 @@ class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): def __init__( self, - lookup: Callable[[str, Context], SymbolTableNode | None], + api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope, *, include_callables: bool = True, - include_bound_tvars: bool = False, ) -> None: - self.include_callables = include_callables - self.lookup = lookup - self.scope = scope - self.include_bound_tvars = include_bound_tvars super().__init__(flatten_tvars) + self.api = api + self.scope = scope + self.include_callables = include_callables # Only include type variables in type aliases args. This would be anyway # that case if we expand (as target variables would be overridden with args) # and it may cause infinite recursion on invalid (diverging) recursive aliases. @@ -1628,9 +1838,7 @@ def __init__( def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False - if isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)): - return True - return False + return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: name = t.name @@ -1639,16 +1847,16 @@ def visit_unbound_type(self, t: UnboundType) -> TypeVarLikeList: if name.endswith("args"): if name.endswith(".args") or name.endswith(".kwargs"): base = ".".join(name.split(".")[:-1]) - n = self.lookup(base, t) + n = self.api.lookup_qualified(base, t) if n is not None and isinstance(n.node, ParamSpecExpr): node = n name = base if node is None: - node = self.lookup(name, t) + node = self.api.lookup_qualified(name, t) if ( node and isinstance(node.node, TypeVarLikeExpr) - and (self.include_bound_tvars or self.scope.get_binding(node) is None) + and self.scope.get_binding(node) is None ): assert isinstance(node.node, TypeVarLikeExpr) return [(name, node.node)] @@ -1684,26 +1892,11 @@ def __init__( self.scope = scope self.diverging = False - def is_alias_tvar(self, t: Type) -> bool: - # Generic type aliases use unbound type variables. 
- if not isinstance(t, UnboundType) or t.args: - return False - node = self.lookup(t.name, t) - if ( - node - and isinstance(node.node, TypeVarLikeExpr) - and self.scope.get_binding(node) is None - ): - return True - return False - def visit_type_alias_type(self, t: TypeAliasType) -> Type: assert t.alias is not None, f"Unfixed type alias {t.type_ref}" if t.alias in self.seen_nodes: for arg in t.args: - if not self.is_alias_tvar(arg) and bool( - arg.accept(TypeVarLikeQuery(self.lookup, self.scope)) - ): + if not isinstance(arg, TypeVarLikeType) and has_type_vars(arg): self.diverging = True return t # All clear for this expansion chain. @@ -1777,9 +1970,9 @@ def has_any_from_unimported_type(t: Type) -> bool: return t.accept(HasAnyFromUnimportedType()) -class HasAnyFromUnimportedType(TypeQuery[bool]): +class HasAnyFromUnimportedType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_any(self, t: AnyType) -> bool: return t.type_of_any == TypeOfAny.from_unimported_type @@ -1860,3 +2053,19 @@ def visit_instance(self, typ: Instance) -> None: python_version=self.python_version, use_generic_error=True, ) + + +def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: + return typ.accept(HasSelfType(lookup)) + + +class HasSelfType(BoolTypeQuery): + def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: + self.lookup = lookup + super().__init__(ANY_STRATEGY) + + def visit_unbound_type(self, t: UnboundType) -> bool: + sym = self.lookup(t.name) + if sym and sym.fullname in SELF_TYPE_NAMES: + return True + return super().visit_unbound_type(t) diff --git a/mypy/typeops.py b/mypy/typeops.py index b1aa2a189a20..8c01fb118076 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -8,7 +8,7 @@ from __future__ import annotations import itertools -from typing import Any, Iterable, List, Sequence, Type as TypingType, TypeVar, cast +from typing import Any, Iterable, List, Sequence, TypeVar, cast from mypy.copytype import copy_type from mypy.expandtype import expand_type, expand_type_by_instance @@ -33,6 +33,7 @@ ENUM_REMOVED_PROPS, AnyType, CallableType, + ExtraAttrs, FormalArgument, FunctionLike, Instance, @@ -41,10 +42,12 @@ Overloaded, Parameters, ParamSpecType, + PartialType, ProperType, TupleType, Type, TypeAliasType, + TypedDictType, TypeOfAny, TypeQuery, TypeType, @@ -70,13 +73,13 @@ def is_recursive_pair(s: Type, t: Type) -> bool: """ if isinstance(s, TypeAliasType) and s.is_recursive: return ( - isinstance(get_proper_type(t), Instance) + isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( - isinstance(get_proper_type(s), Instance) + isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive ) @@ -104,7 +107,16 @@ def tuple_fallback(typ: TupleType) -> Instance: raise NotImplementedError else: items.append(item) - return Instance(info, [join_type_list(items)]) + return Instance(info, [join_type_list(items)], extra_attrs=typ.partial_fallback.extra_attrs) + + +def get_self_type(func: CallableType, default_self: Instance | TupleType) -> Type | None: + if isinstance(get_proper_type(func.ret_type), UninhabitedType): + return func.ret_type + elif func.arg_types and func.arg_types[0] != default_self and func.arg_kinds[0] == ARG_POS: + return func.arg_types[0] + else: + return None def type_object_type_from_function( @@ -117,14 +129,7 @@ def 
type_object_type_from_function( # classes such as subprocess.Popen. default_self = fill_typevars(info) if not is_new and not info.is_newtype: - orig_self_types = [ - ( - it.arg_types[0] - if it.arg_types and it.arg_types[0] != default_self and it.arg_kinds[0] == ARG_POS - else None - ) - for it in signature.items - ] + orig_self_types = [get_self_type(it, default_self) for it in signature.items] else: orig_self_types = [None] * len(signature.items) @@ -177,7 +182,7 @@ def class_callable( default_ret_type = fill_typevars(info) explicit_type = init_ret_type if is_new else orig_self_type if ( - isinstance(explicit_type, (Instance, TupleType)) + isinstance(explicit_type, (Instance, TupleType, UninhabitedType)) # We have to skip protocols, because it can be a subtype of a return type # by accident. Like `Hashable` is a subtype of `object`. See #11799 and isinstance(default_ret_type, Instance) @@ -436,6 +441,7 @@ def make_simplified_union( * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified away!) * [Any, Any] -> Any + * [int, Union[bytes, str]] -> Union[int, bytes, str] Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. @@ -450,17 +456,45 @@ def make_simplified_union( # Step 1: expand all nested unions items = flatten_nested_unions(items) - # Step 2: remove redundant unions + # Step 2: fast path for single item + if len(items) == 1: + return get_proper_type(items[0]) + + # Step 3: remove redundant unions simplified_set: Sequence[Type] = _remove_redundant_union_items(items, keep_erased) - # Step 3: If more than one literal exists in the union, try to simplify + # Step 4: If more than one literal exists in the union, try to simplify if ( contract_literals and sum(isinstance(get_proper_type(item), LiteralType) for item in simplified_set) > 1 ): simplified_set = try_contracting_literals_in_union(simplified_set) - return get_proper_type(UnionType.make_union(simplified_set, line, column)) + result = get_proper_type(UnionType.make_union(simplified_set, line, column)) + + nitems = len(items) + if nitems > 1 and ( + nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) + ): + # Step 5: At last, we erase any (inconsistent) extra attributes on instances. + + # Initialize with None instead of an empty set as a micro-optimization. The set + # is needed very rarely, so we try to avoid constructing it. + extra_attrs_set: set[ExtraAttrs] | None = None + for item in items: + instance = try_getting_instance_fallback(item) + if instance and instance.extra_attrs: + if extra_attrs_set is None: + extra_attrs_set = {instance.extra_attrs} + else: + extra_attrs_set.add(instance.extra_attrs) + + if extra_attrs_set is not None and len(extra_attrs_set) > 1: + fallback = try_getting_instance_fallback(result) + if fallback: + fallback.extra_attrs = None + + return result def _remove_redundant_union_items(items: list[Type], keep_erased: bool) -> list[Type]: @@ -515,7 +549,7 @@ def _remove_redundant_union_items(items: list[Type], keep_erased: bool) -> list[ continue # actual redundancy checks (XXX?) if is_redundant_literal_instance(proper_item, proper_tj) and is_proper_subtype( - tj, item, keep_erased_types=keep_erased + tj, item, keep_erased_types=keep_erased, ignore_promotions=True ): # We found a redundant item in the union. 
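# --- Editor's notes on the union simplification changes above ----------------
# (illustrative only, using the examples from the updated docstring)
#
#   make_simplified_union([int, int])                -> int   (single-item fast
#                                                             path after flattening)
#   make_simplified_union([int, Union[bytes, str]])  -> Union[int, bytes, str]
#
# With ignore_promotions=True in the proper-subtype check here, items that are
# related only through a promotion (e.g. int -> float) are presumably no longer
# treated as redundant, so a union such as Union[int, float] should be kept
# rather than collapsed to float.
# -----------------------------------------------------------------------------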
removed.add(j) @@ -741,7 +775,7 @@ def try_getting_int_literals_from_type(typ: Type) -> list[int] | None: def try_getting_literals_from_type( - typ: Type, target_literal_type: TypingType[T], target_fullname: str + typ: Type, target_literal_type: type[T], target_fullname: str ) -> list[T] | None: """If the given expression or type corresponds to a Literal or union of Literals where the underlying values correspond to the given @@ -982,3 +1016,37 @@ def separate_union_literals(t: UnionType) -> tuple[Sequence[LiteralType], Sequen union_items.append(item) return literal_items, union_items + + +def try_getting_instance_fallback(typ: Type) -> Instance | None: + """Returns the Instance fallback for this type if one exists or None.""" + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ + elif isinstance(typ, LiteralType): + return typ.fallback + elif isinstance(typ, NoneType): + return None # Fast path for None, which is common + elif isinstance(typ, FunctionLike): + return typ.fallback + elif isinstance(typ, TupleType): + return typ.partial_fallback + elif isinstance(typ, TypedDictType): + return typ.fallback + elif isinstance(typ, TypeVarType): + return try_getting_instance_fallback(typ.upper_bound) + return None + + +def fixup_partial_type(typ: Type) -> Type: + """Convert a partial type that we couldn't resolve into something concrete. + + This means, for None we make it Optional[Any], and for anything else we + fill in all of the type arguments with Any. + """ + if not isinstance(typ, PartialType): + return typ + if typ.type is None: + return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) + else: + return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) diff --git a/mypy/types.py b/mypy/types.py index cfb6c62de147..9858559ad5c1 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -7,16 +7,18 @@ from typing import ( TYPE_CHECKING, Any, + Callable, ClassVar, Dict, Iterable, NamedTuple, + NewType, Sequence, TypeVar, Union, cast, ) -from typing_extensions import Final, TypeAlias as _TypeAlias, overload +from typing_extensions import Final, TypeAlias as _TypeAlias, TypeGuard, overload import mypy.nodes from mypy.bogus_type import Bogus @@ -28,6 +30,7 @@ ArgKind, FakeInfo, FuncDef, + FuncItem, SymbolNode, ) from mypy.state import state @@ -64,7 +67,10 @@ # Note: Although "Literal[None]" is a valid type, we internally always convert # such a type directly into "None". So, "None" is not a valid parameter of # LiteralType and is omitted from this list. -LiteralValue: _TypeAlias = Union[int, str, bool] +# +# Note: Float values are only used internally. They are not accepted within +# Literal[...]. +LiteralValue: _TypeAlias = Union[int, str, bool, float] # If we only import type_visitor in the middle of the file, mypy @@ -144,9 +150,20 @@ "typing_extensions.Never", ) +# Mypyc fixed-width native int types (compatible with builtins.int) +MYPYC_NATIVE_INT_NAMES: Final = ("mypy_extensions.i64", "mypy_extensions.i32") + +DATACLASS_TRANSFORM_NAMES: Final = ( + "typing.dataclass_transform", + "typing_extensions.dataclass_transform", +) + # A placeholder used for Bogus[...] 
parameters _dummy: Final[Any] = object() +# A placeholder for int parameters +_dummy_int: Final = -999999 + class TypeOfAny: """ @@ -192,7 +209,7 @@ def deserialize_type(data: JsonDict | str) -> Type: class Type(mypy.nodes.Context): """Abstract base class for all types.""" - __slots__ = ("can_be_true", "can_be_false") + __slots__ = ("_can_be_true", "_can_be_false") # 'can_be_true' and 'can_be_false' mean whether the value of the # expression can be true or false in a boolean context. They are useful # when inferring the type of logic expressions like `x and y`. @@ -205,8 +222,29 @@ class Type(mypy.nodes.Context): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_true = self.can_be_true_default() - self.can_be_false = self.can_be_false_default() + # Value of these can be -1 (use the default, lazy init), 0 (false) or 1 (true) + self._can_be_true = -1 + self._can_be_false = -1 + + @property + def can_be_true(self) -> bool: + if self._can_be_true == -1: # Lazy init helps mypyc + self._can_be_true = self.can_be_true_default() + return bool(self._can_be_true) + + @can_be_true.setter + def can_be_true(self, v: bool) -> None: + self._can_be_true = v + + @property + def can_be_false(self) -> bool: + if self._can_be_false == -1: # Lazy init helps mypyc + self._can_be_false = self.can_be_false_default() + return bool(self._can_be_false) + + @can_be_false.setter + def can_be_false(self, v: bool) -> None: + self._can_be_false = v def can_be_true_default(self) -> bool: return True @@ -255,10 +293,10 @@ def __init__( line: int = -1, column: int = -1, ) -> None: + super().__init__(line, column) self.alias = alias self.args = args self.type_ref: str | None = None - super().__init__(line, column) def _expand_once(self) -> Type: """Expand to the target type exactly once. @@ -277,30 +315,42 @@ def _expand_once(self) -> Type: self.alias.target, self.alias.alias_tvars, self.args, self.line, self.column ) - def _partial_expansion(self) -> tuple[ProperType, bool]: + def _partial_expansion(self, nothing_args: bool = False) -> tuple[ProperType, bool]: # Private method mostly for debugging and testing. unroller = UnrollAliasVisitor(set()) - unrolled = self.accept(unroller) + if nothing_args: + alias = self.copy_modified(args=[UninhabitedType()] * len(self.args)) + else: + alias = self + unrolled = alias.accept(unroller) assert isinstance(unrolled, ProperType) return unrolled, unroller.recursed - def expand_all_if_possible(self) -> ProperType | None: + def expand_all_if_possible(self, nothing_args: bool = False) -> ProperType | None: """Attempt a full expansion of the type alias (including nested aliases). If the expansion is not possible, i.e. the alias is (mutually-)recursive, - return None. + return None. If nothing_args is True, replace all type arguments with an + UninhabitedType() (used to detect recursively defined aliases). """ - unrolled, recursed = self._partial_expansion() + unrolled, recursed = self._partial_expansion(nothing_args=nothing_args) if recursed: return None return unrolled @property def is_recursive(self) -> bool: + """Whether this type alias is recursive. + + Note this doesn't check generic alias arguments, but only if this alias + *definition* is recursive. The property value thus can be cached on the + underlying TypeAlias node. If you want to include all nested types, use + has_recursive_types() function. 
+ """ assert self.alias is not None, "Unfixed type alias" is_recursive = self.alias._is_recursive if is_recursive is None: - is_recursive = self.expand_all_if_possible() is None + is_recursive = self.expand_all_if_possible(nothing_args=True) is None # We cache the value on the underlying TypeAlias node as an optimization, # since the value is the same for all instances of the same alias. self.alias._is_recursive = is_recursive @@ -401,7 +451,8 @@ class TypeVarId: # For plain variables (type parameters of generic classes and # functions) raw ids are allocated by semantic analysis, using # positive ids 1, 2, ... for generic class parameters and negative - # ids -1, ... for generic function type arguments. This convention + # ids -1, ... for generic function type arguments. A special value 0 + # is reserved for Self type variable (autogenerated). This convention # is only used to keep type variable ids distinct when allocating # them; the type checker makes no distinction between class and # function type variables. @@ -436,14 +487,12 @@ def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: - if isinstance(other, TypeVarId): - return ( - self.raw_id == other.raw_id - and self.meta_level == other.meta_level - and self.namespace == other.namespace - ) - else: - return False + return ( + isinstance(other, TypeVarId) + and self.raw_id == other.raw_id + and self.meta_level == other.meta_level + and self.namespace == other.namespace + ) def __ne__(self, other: object) -> bool: return not (self == other) @@ -516,27 +565,41 @@ def __init__( @staticmethod def new_unification_variable(old: TypeVarType) -> TypeVarType: new_id = TypeVarId.new(meta_level=1) + return old.copy_modified(id=new_id) + + def copy_modified( + self, + values: Bogus[list[Type]] = _dummy, + upper_bound: Bogus[Type] = _dummy, + id: Bogus[TypeVarId | int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, + ) -> TypeVarType: return TypeVarType( - old.name, - old.fullname, - new_id, - old.values, - old.upper_bound, - old.variance, - old.line, - old.column, + self.name, + self.fullname, + self.id if id is _dummy else id, + self.values if values is _dummy else values, + self.upper_bound if upper_bound is _dummy else upper_bound, + self.variance, + self.line if line == _dummy_int else line, + self.column if column == _dummy_int else column, ) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var(self) def __hash__(self) -> int: - return hash(self.id) + return hash((self.id, self.upper_bound, tuple(self.values))) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarType): return NotImplemented - return self.id == other.id + return ( + self.id == other.id + and self.upper_bound == other.upper_bound + and self.values == other.values + ) def serialize(self) -> JsonDict: assert not self.id.is_meta_var() @@ -615,16 +678,7 @@ def __init__( @staticmethod def new_unification_variable(old: ParamSpecType) -> ParamSpecType: new_id = TypeVarId.new(meta_level=1) - return ParamSpecType( - old.name, - old.fullname, - new_id, - old.flavor, - old.upper_bound, - line=old.line, - column=old.column, - prefix=old.prefix, - ) + return old.copy_modified(id=new_id) def with_flavor(self, flavor: int) -> ParamSpecType: return ParamSpecType( @@ -640,14 +694,14 @@ def copy_modified( self, *, id: Bogus[TypeVarId | int] = _dummy, - flavor: Bogus[int] = _dummy, + flavor: int = _dummy_int, prefix: Bogus[Parameters] = _dummy, ) -> ParamSpecType: return ParamSpecType( 
self.name, self.fullname, id if id is not _dummy else self.id, - flavor if flavor is not _dummy else self.flavor, + flavor if flavor != _dummy_int else self.flavor, self.upper_bound, line=self.line, column=self.column, @@ -666,13 +720,13 @@ def name_with_suffix(self) -> str: return n def __hash__(self) -> int: - return hash((self.id, self.flavor)) + return hash((self.id, self.flavor, self.prefix)) def __eq__(self, other: object) -> bool: if not isinstance(other, ParamSpecType): return NotImplemented # Upper bound can be ignored, since it's determined by flavor. - return self.id == other.id and self.flavor == other.flavor + return self.id == other.id and self.flavor == other.flavor and self.prefix == other.prefix def serialize(self) -> JsonDict: assert not self.id.is_meta_var() @@ -705,6 +759,20 @@ class TypeVarTupleType(TypeVarLikeType): See PEP646 for more information. """ + def __init__( + self, + name: str, + fullname: str, + id: TypeVarId | int, + upper_bound: Type, + tuple_fallback: Instance, + *, + line: int = -1, + column: int = -1, + ) -> None: + super().__init__(name, fullname, id, upper_bound, line=line, column=column) + self.tuple_fallback = tuple_fallback + def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { @@ -713,13 +781,18 @@ def serialize(self) -> JsonDict: "fullname": self.fullname, "id": self.id.raw_id, "upper_bound": self.upper_bound.serialize(), + "tuple_fallback": self.tuple_fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> TypeVarTupleType: assert data[".class"] == "TypeVarTupleType" return TypeVarTupleType( - data["name"], data["fullname"], data["id"], deserialize_type(data["upper_bound"]) + data["name"], + data["fullname"], + data["id"], + deserialize_type(data["upper_bound"]), + Instance.deserialize(data["tuple_fallback"]), ) def accept(self, visitor: TypeVisitor[T]) -> T: @@ -736,8 +809,17 @@ def __eq__(self, other: object) -> bool: @staticmethod def new_unification_variable(old: TypeVarTupleType) -> TypeVarTupleType: new_id = TypeVarId.new(meta_level=1) + return old.copy_modified(id=new_id) + + def copy_modified(self, id: Bogus[TypeVarId | int] = _dummy) -> TypeVarTupleType: return TypeVarTupleType( - old.name, old.fullname, new_id, old.upper_bound, line=old.line, column=old.column + self.name, + self.fullname, + self.id if id is _dummy else id, + self.upper_bound, + self.tuple_fallback, + line=self.line, + column=self.column, ) @@ -868,7 +950,7 @@ def __init__( def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_callable_argument(self) + return cast(T, visitor.visit_callable_argument(self)) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -893,7 +975,7 @@ def __init__(self, items: list[Type], line: int = -1, column: int = -1) -> None: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_type_list(self) + return cast(T, visitor.visit_type_list(self)) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -902,9 +984,7 @@ def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: - if not isinstance(other, TypeList): - return False - return self.items == other.items + return isinstance(other, TypeList) and self.items == other.items class UnpackType(ProperType): @@ -980,10 +1060,10 @@ def accept(self, visitor: TypeVisitor[T]) -> T: def copy_modified( self, # Mark with Bogus 
because _dummy is just an object (with type Any) - type_of_any: Bogus[int] = _dummy, + type_of_any: int = _dummy_int, original_any: Bogus[AnyType | None] = _dummy, ) -> AnyType: - if type_of_any is _dummy: + if type_of_any == _dummy_int: type_of_any = self.type_of_any if original_any is _dummy: original_any = self.source_any @@ -1154,38 +1234,51 @@ def deserialize(cls, data: JsonDict) -> DeletedType: NOT_READY: Final = mypy.nodes.FakeInfo("De-serialization failure: TypeInfo not fixed") +class ExtraAttrs: + """Summary of module attributes and types. + + This is used for instances of types.ModuleType, because they can have different + attributes per instance, and for type narrowing with hasattr() checks. + """ + + def __init__( + self, + attrs: dict[str, Type], + immutable: set[str] | None = None, + mod_name: str | None = None, + ) -> None: + self.attrs = attrs + if immutable is None: + immutable = set() + self.immutable = immutable + self.mod_name = mod_name + + def __hash__(self) -> int: + return hash((tuple(self.attrs.items()), tuple(sorted(self.immutable)))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, ExtraAttrs): + return NotImplemented + return self.attrs == other.attrs and self.immutable == other.immutable + + def copy(self) -> ExtraAttrs: + return ExtraAttrs(self.attrs.copy(), self.immutable.copy(), self.mod_name) + + def __repr__(self) -> str: + return f"ExtraAttrs({self.attrs!r}, {self.immutable!r}, {self.mod_name!r})" + + class Instance(ProperType): """An instance type of form C[T1, ..., Tn]. The list of type variables may be empty. - Several types has fallbacks to `Instance`. Why? - Because, for example `TupleTuple` is related to `builtins.tuple` instance. - And `FunctionLike` has `builtins.function` fallback. - This allows us to use types defined - in typeshed for our "special" and more precise types. - - We used to have this helper function to get a fallback from different types. - Note, that it might be incomplete, since it is not used and not updated. - It just illustrates the concept: - - def try_getting_instance_fallback(typ: ProperType) -> Optional[Instance]: - '''Returns the Instance fallback for this type if one exists or None.''' - if isinstance(typ, Instance): - return typ - elif isinstance(typ, TupleType): - return tuple_fallback(typ) - elif isinstance(typ, TypedDictType): - return typ.fallback - elif isinstance(typ, FunctionLike): - return typ.fallback - elif isinstance(typ, LiteralType): - return typ.fallback - return None - + Several types have fallbacks to `Instance`, because in Python everything is an object + and this concept is impossible to express without intersection types. We therefore use + fallbacks for all "non-special" (like UninhabitedType, ErasedType etc) types. """ - __slots__ = ("type", "args", "invalid", "type_ref", "last_known_value", "_hash") + __slots__ = ("type", "args", "invalid", "type_ref", "last_known_value", "_hash", "extra_attrs") def __init__( self, @@ -1195,6 +1288,7 @@ def __init__( column: int = -1, *, last_known_value: LiteralType | None = None, + extra_attrs: ExtraAttrs | None = None, ) -> None: super().__init__(line, column) self.type = typ @@ -1252,12 +1346,17 @@ def __init__( # Cached hash value self._hash = -1 + # Additional attributes defined per instance of this type. For example modules + # have different attributes per instance of types.ModuleType. This is intended + # to be "short-lived", we don't serialize it, and even don't store as variable type. 
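# --- Editor's illustration (not part of the patch) ---------------------------
# What ExtraAttrs is meant to enable at the user level, per its docstring
# above; the checker-side narrowing logic lives outside this file, so the
# exact behaviour here is an assumption:
def describe(obj: object) -> str:
    if hasattr(obj, "name"):
        # The idea: this particular Instance of `object` can carry an extra
        # "name" attribute via extra_attrs, so the access below can be allowed.
        return str(obj.name)
    return repr(obj)
# -----------------------------------------------------------------------------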
+ self.extra_attrs = extra_attrs + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_instance(self) def __hash__(self) -> int: if self._hash == -1: - self._hash = hash((self.type, self.args, self.last_known_value)) + self._hash = hash((self.type, self.args, self.last_known_value, self.extra_attrs)) return self._hash def __eq__(self, other: object) -> bool: @@ -1267,6 +1366,7 @@ def __eq__(self, other: object) -> bool: self.type == other.type and self.args == other.args and self.last_known_value == other.last_known_value + and self.extra_attrs == other.extra_attrs ) def serialize(self) -> JsonDict | str: @@ -1314,10 +1414,21 @@ def copy_modified( if last_known_value is not _dummy else self.last_known_value, ) + # We intentionally don't copy the extra_attrs here, so they will be erased. new.can_be_true = self.can_be_true new.can_be_false = self.can_be_false return new + def copy_with_extra_attr(self, name: str, typ: Type) -> Instance: + if self.extra_attrs: + existing_attrs = self.extra_attrs.copy() + else: + existing_attrs = ExtraAttrs({}, set(), None) + existing_attrs.attrs[name] = typ + new = self.copy_modified() + new.extra_attrs = existing_attrs + return new + def has_readable_member(self, name: str) -> bool: return self.type.has_readable_member(name) @@ -1346,7 +1457,7 @@ class FunctionLike(ProperType): def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) - self.can_be_false = False + self._can_be_false = False @abstractmethod def is_type_obj(self) -> bool: @@ -1561,6 +1672,9 @@ def __eq__(self, other: object) -> bool: return NotImplemented +CT = TypeVar("CT", bound="CallableType") + + class CallableType(FunctionLike): """Type of a non-overloaded callable object (such as function).""" @@ -1590,6 +1704,7 @@ class CallableType(FunctionLike): "type_guard", # T, if -> TypeGuard[T] (ret_type is bool in this case). "from_concatenate", # whether this callable is from a concatenate object # (this is used for error messages) + "unpack_kwargs", # Was an Unpack[...] with **kwargs used to define this callable? 
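# --- Editor's mypy-internal sketch (hypothetical names) ----------------------
# Instance.copy_with_extra_attr(), added above, lets the checker attach one
# learned attribute at a time:
#
#     narrowed = module_instance.copy_with_extra_attr("version", str_type)
#     assert narrowed.extra_attrs and "version" in narrowed.extra_attrs.attrs
#
# Note that copy_modified() above intentionally does not copy extra_attrs, so
# the information stays short-lived, matching the comment in __init__.
# -----------------------------------------------------------------------------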
) def __init__( @@ -1613,6 +1728,7 @@ def __init__( def_extras: dict[str, Any] | None = None, type_guard: Type | None = None, from_concatenate: bool = False, + unpack_kwargs: bool = False, ) -> None: super().__init__(line, column) assert len(arg_types) == len(arg_kinds) == len(arg_names) @@ -1653,9 +1769,10 @@ def __init__( else: self.def_extras = {} self.type_guard = type_guard + self.unpack_kwargs = unpack_kwargs def copy_modified( - self, + self: CT, arg_types: Bogus[Sequence[Type]] = _dummy, arg_kinds: Bogus[list[ArgKind]] = _dummy, arg_names: Bogus[list[str | None]] = _dummy, @@ -1664,8 +1781,8 @@ def copy_modified( name: Bogus[str | None] = _dummy, definition: Bogus[SymbolNode] = _dummy, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, - line: Bogus[int] = _dummy, - column: Bogus[int] = _dummy, + line: int = _dummy_int, + column: int = _dummy_int, is_ellipsis_args: Bogus[bool] = _dummy, implicit: Bogus[bool] = _dummy, special_sig: Bogus[str | None] = _dummy, @@ -1674,8 +1791,9 @@ def copy_modified( def_extras: Bogus[dict[str, Any]] = _dummy, type_guard: Bogus[Type | None] = _dummy, from_concatenate: Bogus[bool] = _dummy, - ) -> CallableType: - return CallableType( + unpack_kwargs: Bogus[bool] = _dummy, + ) -> CT: + modified = CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, @@ -1684,8 +1802,8 @@ def copy_modified( name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, - line=line if line is not _dummy else self.line, - column=column if column is not _dummy else self.column, + line=line if line != _dummy_int else self.line, + column=column if column != _dummy_int else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args ), @@ -1698,7 +1816,11 @@ def copy_modified( from_concatenate=( from_concatenate if from_concatenate is not _dummy else self.from_concatenate ), + unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs, ) + # Optimization: Only NewTypes are supported as subtypes since + # the class is effectively final, so we can use a cast safely. 
+ return cast(CT, modified) def var_arg(self) -> FormalArgument | None: """The formal argument for *args.""" @@ -1725,7 +1847,9 @@ def is_kw_arg(self) -> bool: return ARG_STAR2 in self.arg_kinds def is_type_obj(self) -> bool: - return self.fallback.type.is_metaclass() + return self.fallback.type.is_metaclass() and not isinstance( + get_proper_type(self.ret_type), UninhabitedType + ) def type_object(self) -> mypy.nodes.TypeInfo: assert self.is_type_obj() @@ -1889,6 +2013,27 @@ def expand_param_spec( variables=[*variables, *self.variables], ) + def with_unpacked_kwargs(self) -> NormalizedCallableType: + if not self.unpack_kwargs: + return cast(NormalizedCallableType, self) + last_type = get_proper_type(self.arg_types[-1]) + assert isinstance(last_type, TypedDictType) + extra_kinds = [ + ArgKind.ARG_NAMED if name in last_type.required_keys else ArgKind.ARG_NAMED_OPT + for name in last_type.items + ] + new_arg_kinds = self.arg_kinds[:-1] + extra_kinds + new_arg_names = self.arg_names[:-1] + list(last_type.items) + new_arg_types = self.arg_types[:-1] + list(last_type.items.values()) + return NormalizedCallableType( + self.copy_modified( + arg_kinds=new_arg_kinds, + arg_names=new_arg_names, + arg_types=new_arg_types, + unpack_kwargs=False, + ) + ) + def __hash__(self) -> int: # self.is_type_obj() will fail if self.fallback.type is a FakeInfo if isinstance(self.fallback.type, FakeInfo): @@ -1904,6 +2049,7 @@ def __hash__(self) -> int: tuple(self.arg_types), tuple(self.arg_names), tuple(self.arg_kinds), + self.fallback, ) ) @@ -1917,6 +2063,7 @@ def __eq__(self, other: object) -> bool: and self.name == other.name and self.is_type_obj() == other.is_type_obj() and self.is_ellipsis_args == other.is_ellipsis_args + and self.fallback == other.fallback ) else: return NotImplemented @@ -1940,6 +2087,7 @@ def serialize(self) -> JsonDict: "def_extras": dict(self.def_extras), "type_guard": self.type_guard.serialize() if self.type_guard is not None else None, "from_concatenate": self.from_concatenate, + "unpack_kwargs": self.unpack_kwargs, } @classmethod @@ -1962,9 +2110,16 @@ def deserialize(cls, data: JsonDict) -> CallableType: deserialize_type(data["type_guard"]) if data["type_guard"] is not None else None ), from_concatenate=data["from_concatenate"], + unpack_kwargs=data["unpack_kwargs"], ) +# This is a little safety net to prevent reckless special-casing of callables +# that can potentially break Unpack[...] with **kwargs. +# TODO: use this in more places in checkexpr.py etc? +NormalizedCallableType = NewType("NormalizedCallableType", CallableType) + + class Overloaded(FunctionLike): """Overloaded function type T1, ... Tn, where each Ti is CallableType. 
@@ -2009,6 +2164,11 @@ def with_name(self, name: str) -> Overloaded: def get_name(self) -> str | None: return self._items[0].name + def with_unpacked_kwargs(self) -> Overloaded: + if any(i.unpack_kwargs for i in self.items): + return Overloaded([i.with_unpacked_kwargs() for i in self.items]) + return self + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_overloaded(self) @@ -2056,10 +2216,10 @@ def __init__( column: int = -1, implicit: bool = False, ) -> None: + super().__init__(line, column) self.partial_fallback = fallback self.items = items self.implicit = implicit - super().__init__(line, column) def can_be_true_default(self) -> bool: if self.can_be_any_bool(): @@ -2180,16 +2340,19 @@ def __hash__(self) -> int: return hash((frozenset(self.items.items()), self.fallback, frozenset(self.required_keys))) def __eq__(self, other: object) -> bool: - if isinstance(other, TypedDictType): - if frozenset(self.items.keys()) != frozenset(other.items.keys()): - return False - for (_, left_item_type, right_item_type) in self.zip(other): - if not left_item_type == right_item_type: - return False - return self.fallback == other.fallback and self.required_keys == other.required_keys - else: + if not isinstance(other, TypedDictType): return NotImplemented + return ( + frozenset(self.items.keys()) == frozenset(other.items.keys()) + and all( + left_item_type == right_item_type + for (_, left_item_type, right_item_type) in self.zip(other) + ) + and self.fallback == other.fallback + and self.required_keys == other.required_keys + ) + def serialize(self) -> JsonDict: return { ".class": "TypedDictType", @@ -2207,6 +2370,10 @@ def deserialize(cls, data: JsonDict) -> TypedDictType: Instance.deserialize(data["fallback"]), ) + @property + def is_final(self) -> bool: + return self.fallback.type.is_final + def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES @@ -2322,7 +2489,7 @@ def simple_name(self) -> str: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_raw_expression_type(self) + return cast(T, visitor.visit_raw_expression_type(self)) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -2361,8 +2528,8 @@ class LiteralType(ProperType): def __init__( self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1 ) -> None: - self.value = value super().__init__(line, column) + self.value = value self.fallback = fallback self._hash = -1 # Cached hash value @@ -2429,28 +2596,6 @@ def is_singleton_type(self) -> bool: return self.is_enum_literal() or isinstance(self.value, bool) -class StarType(ProperType): - """The star type *type_parameter. - - This is not a real type but a syntactic AST construct. - """ - - __slots__ = ("type",) - - type: Type - - def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: - super().__init__(line, column) - self.type = type - - def accept(self, visitor: TypeVisitor[T]) -> T: - assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_star_type(self) - - def serialize(self) -> JsonDict: - assert False, "Synthetic types don't serialize" - - class UnionType(ProperType): """The union type Union[T1, ..., Tn] (at least one type argument).""" @@ -2468,13 +2613,17 @@ def __init__( # We must keep this false to avoid crashes during semantic analysis. # TODO: maybe switch this to True during type-checking pass? 
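# --- Editor's illustration (not part of the patch) ---------------------------
# CallableType.with_unpacked_kwargs() (and its Overloaded counterpart above)
# normalize a TypedDict-unpacked **kwargs signature. A callable declared as
#
#     class Movie(TypedDict):
#         name: str
#         year: NotRequired[int]
#
#     def f(**kwargs: Unpack[Movie]) -> None: ...
#
# becomes the equivalent keyword-only form
#
#     def f(*, name: str, year: int = ...) -> None: ...
#
# Required keys map to ARG_NAMED, non-required keys to ARG_NAMED_OPT, and the
# result is wrapped in NormalizedCallableType so callers can rely on the
# normalization having happened.
# -----------------------------------------------------------------------------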
self.items = flatten_nested_unions(items, handle_type_alias_type=False) - self.can_be_true = any(item.can_be_true for item in items) - self.can_be_false = any(item.can_be_false for item in items) # is_evaluated should be set to false for type comments and string literals self.is_evaluated = is_evaluated # uses_pep604_syntax is True if Union uses OR syntax (X | Y) self.uses_pep604_syntax = uses_pep604_syntax + def can_be_true_default(self) -> bool: + return any(item.can_be_true for item in self.items) + + def can_be_false_default(self) -> bool: + return any(item.can_be_false for item in self.items) + def __hash__(self) -> int: return hash(frozenset(self.items)) @@ -2587,7 +2736,7 @@ class EllipsisType(ProperType): def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_ellipsis_type(self) + return cast(T, visitor.visit_ellipsis_type(self)) def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -2696,7 +2845,15 @@ def __init__(self, fullname: str | None, args: list[Type], line: int) -> None: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return visitor.visit_placeholder_type(self) + return cast(T, visitor.visit_placeholder_type(self)) + + def __hash__(self) -> int: + return hash((self.fullname, tuple(self.args))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, PlaceholderType): + return NotImplemented + return self.fullname == other.fullname and self.args == other.args def serialize(self) -> str: # We should never get here since all placeholders should be replaced @@ -2729,30 +2886,45 @@ def get_proper_type(typ: Type | None) -> ProperType | None: typ = typ.type_guard while isinstance(typ, TypeAliasType): typ = typ._expand_once() - assert isinstance(typ, ProperType), typ # TODO: store the name of original type alias on this type, so we can show it in errors. - return typ + return cast(ProperType, typ) @overload -def get_proper_types(it: Iterable[Type]) -> list[ProperType]: # type: ignore[misc] +def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: # type: ignore[misc] ... @overload -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType | None]: +def get_proper_types( + types: list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType | None]: ... -def get_proper_types(it: Iterable[Type | None]) -> list[ProperType] | list[ProperType | None]: - return [get_proper_type(t) for t in it] +def get_proper_types( + types: list[Type] | list[Type | None] | tuple[Type | None, ...] +) -> list[ProperType] | list[ProperType | None]: + if isinstance(types, list): + typelist = types + # Optimize for the common case so that we don't need to allocate anything + if not any( + isinstance(t, (TypeAliasType, TypeGuardedType)) for t in typelist # type: ignore[misc] + ): + return cast("list[ProperType]", typelist) + return [get_proper_type(t) for t in typelist] + else: + return [get_proper_type(t) for t in types] # We split off the type visitor base classes to another module # to make it easier to gradually get modules working with mypyc. # Import them here, after the types are defined. # This is intended as a re-export also. 
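# --- Editor's note on the get_proper_types() fast path above -----------------
# When no item is a TypeAliasType (or TypeGuardedType), the input list is
# returned as-is rather than copied, so callers should treat the result as
# read-only:
#
#     proper_items = get_proper_types(typ.items)   # may be typ.items itself
#     # do not mutate proper_items in place
# -----------------------------------------------------------------------------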
-from mypy.type_visitor import ( # noqa: F811 +from mypy.type_visitor import ( # noqa: F811,F401 + ALL_STRATEGY as ALL_STRATEGY, + ANY_STRATEGY as ANY_STRATEGY, + BoolTypeQuery as BoolTypeQuery, SyntheticTypeVisitor as SyntheticTypeVisitor, TypeQuery as TypeQuery, TypeTranslator as TypeTranslator, @@ -2917,7 +3089,10 @@ def visit_callable_type(self, t: CallableType) -> str: name = t.arg_names[i] if name: s += name + ": " - s += t.arg_types[i].accept(self) + type_str = t.arg_types[i].accept(self) + if t.arg_kinds[i] == ARG_STAR2 and t.unpack_kwargs: + type_str = f"Unpack[{type_str}]" + s += type_str if t.arg_kinds[i].is_optional(): s += " =" @@ -2992,10 +3167,6 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> str: def visit_literal_type(self, t: LiteralType) -> str: return f"Literal[{t.value_repr()}]" - def visit_star_type(self, t: StarType) -> str: - s = t.type.accept(self) - return f"*{s}" - def visit_union_type(self, t: UnionType) -> str: s = self.list_str(t.items) return f"Union[{s}]" @@ -3052,9 +3223,6 @@ def visit_ellipsis_type(self, t: EllipsisType) -> Type: def visit_raw_expression_type(self, t: RawExpressionType) -> Type: return t - def visit_star_type(self, t: StarType) -> Type: - return t - def visit_type_list(self, t: TypeList) -> Type: return t @@ -3091,7 +3259,7 @@ def strip_type(typ: Type) -> Type: return orig_typ -def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> bool: +def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[Instance]: if not isinstance(fullnames, tuple): fullnames = (fullnames,) @@ -3100,24 +3268,45 @@ def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> bool: class InstantiateAliasVisitor(TrivialSyntheticTypeTranslator): - def __init__(self, vars: list[str], subs: list[Type]) -> None: - self.replacements = {v: s for (v, s) in zip(vars, subs)} + def __init__(self, vars: list[TypeVarLikeType], subs: list[Type]) -> None: + self.replacements = {v.id: s for (v, s) in zip(vars, subs)} def visit_type_alias_type(self, typ: TypeAliasType) -> Type: return typ.copy_modified(args=[t.accept(self) for t in typ.args]) - def visit_unbound_type(self, typ: UnboundType) -> Type: - # TODO: stop using unbound type variables for type aliases. - # Now that type aliases are very similar to TypeInfos we should - # make type variable tracking similar as well. Maybe we can even support - # upper bounds etc. for generic type aliases. - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_type_var(self, typ: TypeVarType) -> Type: + if typ.id in self.replacements: + return self.replacements[typ.id] return typ - def visit_type_var(self, typ: TypeVarType) -> Type: - if typ.name in self.replacements: - return self.replacements[typ.name] + def visit_callable_type(self, t: CallableType) -> Type: + param_spec = t.param_spec() + if param_spec is not None: + # TODO: this branch duplicates the one in expand_type(), find a way to reuse it + # without import cycle types <-> typeanal <-> expandtype. 
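# --- Editor's caller-side sketch (hypothetical helper) -----------------------
# The TypeGuard[Instance] return type on is_named_instance() above lets mypy
# itself narrow the argument, so follow-up isinstance asserts in callers become
# unnecessary. Written as if it lived alongside these definitions:
def _is_str_list(t: Type) -> bool:
    if is_named_instance(t, "builtins.list"):
        # t is narrowed to Instance here, so .args is accessible directly.
        return len(t.args) == 1 and is_named_instance(t.args[0], "builtins.str")
    return False
# -----------------------------------------------------------------------------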
+ repl = get_proper_type(self.replacements.get(param_spec.id)) + if isinstance(repl, CallableType) or isinstance(repl, Parameters): + prefix = param_spec.prefix + t = t.expand_param_spec(repl, no_prefix=True) + return t.copy_modified( + arg_types=[t.accept(self) for t in prefix.arg_types] + t.arg_types, + arg_kinds=prefix.arg_kinds + t.arg_kinds, + arg_names=prefix.arg_names + t.arg_names, + ret_type=t.ret_type.accept(self), + type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), + ) + return super().visit_callable_type(t) + + def visit_param_spec(self, typ: ParamSpecType) -> Type: + if typ.id in self.replacements: + repl = get_proper_type(self.replacements[typ.id]) + # TODO: all the TODOs from same logic in expand_type() apply here. + if isinstance(repl, Instance): + return repl + elif isinstance(repl, (ParamSpecType, Parameters, CallableType)): + return expand_param_spec(typ, repl) + else: + return repl return typ @@ -3134,7 +3323,7 @@ def visit_instance(self, typ: Instance) -> None: def replace_alias_tvars( - tp: Type, vars: list[str], subs: list[Type], newline: int, newcolumn: int + tp: Type, vars: list[TypeVarLikeType], subs: list[Type], newline: int, newcolumn: int ) -> Type: """Replace type variables in a generic type alias tp with substitutions subs resetting context. Length of subs should be already checked. @@ -3147,39 +3336,68 @@ def replace_alias_tvars( return new_tp -class HasTypeVars(TypeQuery[bool]): +class HasTypeVars(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) + self.skip_alias_target = True def visit_type_var(self, t: TypeVarType) -> bool: return True + def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: + return True + + def visit_param_spec(self, t: ParamSpecType) -> bool: + return True + def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" return typ.accept(HasTypeVars()) -class HasRecursiveType(TypeQuery[bool]): +class HasRecursiveType(BoolTypeQuery): def __init__(self) -> None: - super().__init__(any) + super().__init__(ANY_STRATEGY) def visit_type_alias_type(self, t: TypeAliasType) -> bool: - return t.is_recursive + return t.is_recursive or self.query_types(t.args) + + +# Use singleton since this is hot (note: call reset() before using) +_has_recursive_type: Final = HasRecursiveType() def has_recursive_types(typ: Type) -> bool: """Check if a type contains any recursive aliases (recursively).""" - return typ.accept(HasRecursiveType()) + _has_recursive_type.reset() + return typ.accept(_has_recursive_type) + + +def _flattened(types: Iterable[Type]) -> Iterable[Type]: + for t in types: + tp = get_proper_type(t) + if isinstance(tp, UnionType): + yield from _flattened(tp.items) + else: + yield t def flatten_nested_unions( - types: Iterable[Type], handle_type_alias_type: bool = True + types: Sequence[Type], handle_type_alias_type: bool = True ) -> list[Type]: """Flatten nested unions in a type list.""" + if not isinstance(types, list): + typelist = list(types) + else: + typelist = cast("list[Type]", types) + + # Fast path: most of the time there is nothing to flatten + if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist): # type: ignore[misc] + return typelist + flat_items: list[Type] = [] - # TODO: avoid duplicate types in unions (e.g. 
using hash) - for t in types: + for t in typelist: tp = get_proper_type(t) if handle_type_alias_type else t if isinstance(tp, ProperType) and isinstance(tp, UnionType): flat_items.extend( @@ -3260,11 +3478,21 @@ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value - if not isinstance(typ, LiteralType): - return False - if typ.fallback.type.fullname != fallback_fullname: + return ( + isinstance(typ, LiteralType) + and typ.fallback.type.fullname == fallback_fullname + and typ.value == value + ) + + +def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: + """Does this look like a self-type annotation?""" + typ = get_proper_type(typ) + if not is_classmethod: + return isinstance(typ, TypeVarType) + if not isinstance(typ, TypeType): return False - return typ.value == value + return isinstance(typ.item, TypeVarType) names: Final = globals().copy() @@ -3286,3 +3514,100 @@ def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance fallback=fallback, is_ellipsis_args=True, ) + + +def expand_param_spec( + t: ParamSpecType, repl: ParamSpecType | Parameters | CallableType +) -> ProperType: + """This is shared part of the logic w.r.t. ParamSpec instantiation. + + It is shared between type aliases and proper types, that currently use somewhat different + logic for instantiation.""" + if isinstance(repl, ParamSpecType): + return repl.copy_modified( + flavor=t.flavor, + prefix=t.prefix.copy_modified( + arg_types=t.prefix.arg_types + repl.prefix.arg_types, + arg_kinds=t.prefix.arg_kinds + repl.prefix.arg_kinds, + arg_names=t.prefix.arg_names + repl.prefix.arg_names, + ), + ) + else: + # if the paramspec is *P.args or **P.kwargs: + if t.flavor != ParamSpecFlavor.BARE: + assert isinstance(repl, CallableType), "Should not be able to get here." + # Is this always the right thing to do? + param_spec = repl.param_spec() + if param_spec: + return param_spec.with_flavor(t.flavor) + else: + return repl + else: + return Parameters( + t.prefix.arg_types + repl.arg_types, + t.prefix.arg_kinds + repl.arg_kinds, + t.prefix.arg_names + repl.arg_names, + variables=[*t.prefix.variables, *repl.variables], + ) + + +def store_argument_type( + defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] +) -> None: + arg_type = typ.arg_types[i] + if typ.arg_kinds[i] == ARG_STAR: + if isinstance(arg_type, ParamSpecType): + pass + elif isinstance(arg_type, UnpackType): + unpacked_type = get_proper_type(arg_type.type) + if isinstance(unpacked_type, TupleType): + # Instead of using Tuple[Unpack[Tuple[...]]], just use + # Tuple[...] + arg_type = unpacked_type + elif ( + isinstance(unpacked_type, Instance) + and unpacked_type.type.fullname == "builtins.tuple" + ): + arg_type = unpacked_type + else: + arg_type = TupleType( + [arg_type], + fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), + ) + else: + # builtins.tuple[T] is typing.Tuple[T, ...] 
+ arg_type = named_type("builtins.tuple", [arg_type]) + elif typ.arg_kinds[i] == ARG_STAR2: + if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: + arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) + defn.arguments[i].variable.type = arg_type + + +def remove_trivial(types: Iterable[Type]) -> list[Type]: + """Make trivial simplifications on a list of types without calling is_subtype(). + + This makes following simplifications: + * Remove bottom types (taking into account strict optional setting) + * Remove everything else if there is an `object` + * Remove strict duplicate types + """ + removed_none = False + new_types = [] + all_types = set() + for t in types: + p_t = get_proper_type(t) + if isinstance(p_t, UninhabitedType): + continue + if isinstance(p_t, NoneType) and not state.strict_optional: + removed_none = True + continue + if isinstance(p_t, Instance) and p_t.type.fullname == "builtins.object": + return [p_t] + if p_t not in all_types: + new_types.append(t) + all_types.add(p_t) + if new_types: + return new_types + if removed_none: + return [NoneType()] + return [UninhabitedType()] diff --git a/mypy/typeshed/LICENSE b/mypy/typeshed/LICENSE index e5833ae4231d..13264487581f 100644 --- a/mypy/typeshed/LICENSE +++ b/mypy/typeshed/LICENSE @@ -235,4 +235,3 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. = = = = = - diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index d396ce4d0560..bd1abd204885 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -27,6 +27,7 @@ _collections_abc: 3.3- _compat_pickle: 3.1- _compression: 3.5- _csv: 2.7- +_ctypes: 2.7- _curses: 2.7- _decimal: 3.3- _dummy_thread: 3.0-3.8 diff --git a/mypy/typeshed/stdlib/_ast.pyi b/mypy/typeshed/stdlib/_ast.pyi index c68e921babd0..7bc47266d713 100644 --- a/mypy/typeshed/stdlib/_ast.pyi +++ b/mypy/typeshed/stdlib/_ast.pyi @@ -104,14 +104,14 @@ class Assign(stmt): class AugAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "op", "value") - target: expr + target: Name | Attribute | Subscript op: operator value: expr class AnnAssign(stmt): if sys.version_info >= (3, 10): __match_args__ = ("target", "annotation", "value", "simple") - target: expr + target: Name | Attribute | Subscript annotation: expr value: expr | None simple: int @@ -194,7 +194,7 @@ class Import(stmt): class ImportFrom(stmt): if sys.version_info >= (3, 10): __match_args__ = ("module", "names", "level") - module: _Identifier | None + module: str | None names: list[alias] level: int @@ -329,7 +329,7 @@ class JoinedStr(expr): if sys.version_info < (3, 8): class Num(expr): # Deprecated in 3.8; use Constant - n: complex + n: int | float | complex class Str(expr): # Deprecated in 3.8; use Constant s: str @@ -349,13 +349,13 @@ class Constant(expr): kind: str | None # Aliases for value, for backwards compatibility s: Any - n: complex + n: int | float | complex if sys.version_info >= (3, 8): class NamedExpr(expr): if sys.version_info >= (3, 10): __match_args__ = ("target", "value") - target: expr + target: Name value: expr class Attribute(expr): diff --git a/mypy/typeshed/stdlib/_bisect.pyi b/mypy/typeshed/stdlib/_bisect.pyi index d902e1eea7d4..4c79eec14d72 100644 --- a/mypy/typeshed/stdlib/_bisect.pyi +++ b/mypy/typeshed/stdlib/_bisect.pyi @@ -8,67 +8,67 @@ _T = TypeVar("_T") if sys.version_info >= (3, 10): @overload def bisect_left( - a: Sequence[SupportsRichComparisonT], x: 
SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None ) -> int: ... @overload def bisect_left( a: Sequence[_T], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: Callable[[_T], SupportsRichComparisonT] = ..., + key: Callable[[_T], SupportsRichComparisonT], ) -> int: ... @overload def bisect_right( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None, *, key: None = None ) -> int: ... @overload def bisect_right( a: Sequence[_T], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: Callable[[_T], SupportsRichComparisonT] = ..., + key: Callable[[_T], SupportsRichComparisonT], ) -> int: ... @overload def insort_left( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: None = ..., + key: None = None, ) -> None: ... @overload def insort_left( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] ) -> None: ... @overload def insort_right( a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, - lo: int = ..., - hi: int | None = ..., + lo: int = 0, + hi: int | None = None, *, - key: None = ..., + key: None = None, ) -> None: ... @overload def insort_right( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... + a: MutableSequence[_T], x: _T, lo: int = 0, hi: int | None = None, *, key: Callable[[_T], SupportsRichComparisonT] ) -> None: ... else: def bisect_left( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> int: ... def bisect_right( - a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> int: ... def insort_left( - a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: ... def insort_right( - a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = 0, hi: int | None = None ) -> None: ... diff --git a/mypy/typeshed/stdlib/_bootlocale.pyi b/mypy/typeshed/stdlib/_bootlocale.pyi index ee2d89347a9f..233d4934f3c6 100644 --- a/mypy/typeshed/stdlib/_bootlocale.pyi +++ b/mypy/typeshed/stdlib/_bootlocale.pyi @@ -1 +1 @@ -def getpreferredencoding(do_setlocale: bool = ...) -> str: ... +def getpreferredencoding(do_setlocale: bool = True) -> str: ... 
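
Editor's note: one notable change in the mypy/types.py hunk further up is that is_named_instance now returns TypeGuard[Instance] instead of bool. The sketch below is a minimal analogue of that pattern using stand-in classes, not mypy's real Type/Instance hierarchy; it only shows what the TypeGuard return type buys callers, namely that a successful check narrows the argument without a cast or assert.

# Minimal analogue of the TypeGuard change; Type and Instance here are
# illustrative stand-ins. Requires Python 3.10+ for typing.TypeGuard
# (typing_extensions provides it on older versions).
from __future__ import annotations

from typing import TypeGuard


class Type:  # stand-in for mypy.types.Type
    pass


class Instance(Type):  # stand-in for mypy.types.Instance
    def __init__(self, fullname: str) -> None:
        self.fullname = fullname


def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[Instance]:
    # Same shape as the real helper: accept one name or a tuple of names.
    if not isinstance(fullnames, tuple):
        fullnames = (fullnames,)
    return isinstance(t, Instance) and t.fullname in fullnames


def describe(t: Type) -> str:
    if is_named_instance(t, ("builtins.str", "builtins.bytes")):
        # Thanks to TypeGuard, t is narrowed to Instance in this branch,
        # so accessing .fullname type-checks without a cast.
        return f"named instance {t.fullname}"
    return "some other type"


print(describe(Instance("builtins.str")))  # named instance builtins.str
print(describe(Type()))                    # some other type
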
diff --git a/mypy/typeshed/stdlib/_codecs.pyi b/mypy/typeshed/stdlib/_codecs.pyi index 9241ac6a7038..51f17f01ca71 100644 --- a/mypy/typeshed/stdlib/_codecs.pyi +++ b/mypy/typeshed/stdlib/_codecs.pyi @@ -1,5 +1,6 @@ import codecs import sys +from _typeshed import ReadableBuffer from collections.abc import Callable from typing import overload from typing_extensions import Literal, TypeAlias @@ -44,86 +45,94 @@ _BytesToBytesEncoding: TypeAlias = Literal[ _StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] @overload -def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... @overload -def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # type: ignore[misc] @overload -def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... +def encode(obj: str, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... @overload -def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = "strict") -> bytes: ... # type: ignore[misc] @overload -def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = "strict") -> str: ... # these are documented as text encodings but in practice they also accept str as input @overload def decode( - obj: str, encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], errors: str = ... + obj: str, + encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], + errors: str = "strict", ) -> str: ... # hex is officially documented as a bytes to bytes encoding, but it appears to also work with str @overload -def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = "strict") -> bytes: ... @overload -def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... +def decode(obj: ReadableBuffer, encoding: str = "utf-8", errors: str = "strict") -> str: ... def lookup(__encoding: str) -> codecs.CodecInfo: ... def charmap_build(__map: str) -> _CharMap: ... -def ascii_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... -def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def charmap_decode(__data: bytes, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... -def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ... -def escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... -def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... -def latin_1_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... -def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def ascii_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... 
+def charmap_decode(__data: ReadableBuffer, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[str, int]: ... +def charmap_encode(__str: str, __errors: str | None = None, __mapping: _CharMap | None = None) -> tuple[bytes, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def escape_encode(__data: bytes, __errors: str | None = None) -> tuple[bytes, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... +def latin_1_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True + ) -> tuple[str, int]: ... else: - def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... -def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def readbuffer_encode(__data: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... +def raw_unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info >= (3, 9): - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = None, __final: bool = True + ) -> tuple[str, int]: ... else: - def unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... -def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def unicode_escape_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.version_info < (3, 8): - def unicode_internal_decode(__obj: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... - def unicode_internal_encode(__obj: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = None) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_16_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... 
+def utf_16_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_16_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False ) -> tuple[str, int, int]: ... -def utf_16_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_32_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_16_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_be_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_encode(__str: str, __errors: str | None = None, __byteorder: int = 0) -> tuple[bytes, int]: ... def utf_32_ex_decode( - __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... + __data: ReadableBuffer, __errors: str | None = None, __byteorder: int = 0, __final: bool = False ) -> tuple[str, int, int]: ... -def utf_32_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_7_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_8_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... -def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_32_le_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_7_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... +def utf_8_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... - def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... - def code_page_decode(__codepage: int, __data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... - def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
- def oem_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... - def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + def mbcs_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + def code_page_decode( + __codepage: int, __data: ReadableBuffer, __errors: str | None = None, __final: bool = False + ) -> tuple[str, int]: ... + def code_page_encode(__code_page: int, __str: str, __errors: str | None = None) -> tuple[bytes, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... + def oem_encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... diff --git a/mypy/typeshed/stdlib/_collections_abc.pyi b/mypy/typeshed/stdlib/_collections_abc.pyi index 8373fe836330..352da6cfb331 100644 --- a/mypy/typeshed/stdlib/_collections_abc.pyi +++ b/mypy/typeshed/stdlib/_collections_abc.pyi @@ -1,6 +1,6 @@ import sys from types import MappingProxyType -from typing import ( # noqa: Y027,Y038 +from typing import ( # noqa: Y022,Y038 AbstractSet as Set, AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, diff --git a/mypy/typeshed/stdlib/_compression.pyi b/mypy/typeshed/stdlib/_compression.pyi index 7047a7bcd325..817f251586b2 100644 --- a/mypy/typeshed/stdlib/_compression.pyi +++ b/mypy/typeshed/stdlib/_compression.pyi @@ -21,5 +21,5 @@ class DecompressReader(RawIOBase): **decomp_args: Any, ) -> None: ... def readinto(self, b: WriteableBuffer) -> int: ... - def read(self, size: int = ...) -> bytes: ... - def seek(self, offset: int, whence: int = ...) -> int: ... + def read(self, size: int = -1) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... diff --git a/mypy/typeshed/stdlib/_csv.pyi b/mypy/typeshed/stdlib/_csv.pyi index 7d15365d3b02..7e9b9e4e7a79 100644 --- a/mypy/typeshed/stdlib/_csv.pyi +++ b/mypy/typeshed/stdlib/_csv.pyi @@ -1,9 +1,9 @@ from _typeshed import SupportsWrite from collections.abc import Iterable, Iterator -from typing import Any, Union -from typing_extensions import Literal, TypeAlias +from typing import Any +from typing_extensions import Final, Literal, TypeAlias -__version__: str +__version__: Final[str] QUOTE_ALL: Literal[1] QUOTE_MINIMAL: Literal[0] @@ -27,7 +27,7 @@ class Dialect: strict: bool def __init__(self) -> None: ... -_DialectLike: TypeAlias = Union[str, Dialect, type[Dialect]] +_DialectLike: TypeAlias = str | Dialect | type[Dialect] class _reader(Iterator[list[str]]): @property diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi new file mode 100644 index 000000000000..0ad2fcb571b8 --- /dev/null +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -0,0 +1,29 @@ +import sys +from ctypes import _CArgObject, _PointerLike +from typing_extensions import TypeAlias + +FUNCFLAG_CDECL: int +FUNCFLAG_PYTHONAPI: int +FUNCFLAG_USE_ERRNO: int +FUNCFLAG_USE_LASTERROR: int +RTLD_GLOBAL: int +RTLD_LOCAL: int + +if sys.version_info >= (3, 11): + CTYPES_MAX_ARGCOUNT: int + +if sys.platform == "win32": + # Description, Source, HelpFile, HelpContext, scode + _COMError_Details: TypeAlias = tuple[str | None, str | None, str | None, int | None, int | None] + + class COMError(Exception): + hresult: int + text: str | None + details: _COMError_Details + + def __init__(self, hresult: int, text: str | None, details: _COMError_Details) -> None: ... 
+ + def CopyComPointer(src: _PointerLike, dst: _PointerLike | _CArgObject) -> int: ... + + FUNCFLAG_HRESULT: int + FUNCFLAG_STDCALL: int diff --git a/mypy/typeshed/stdlib/_curses.pyi b/mypy/typeshed/stdlib/_curses.pyi index adb1ea84e45b..61881fc09199 100644 --- a/mypy/typeshed/stdlib/_curses.pyi +++ b/mypy/typeshed/stdlib/_curses.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import SupportsRead +from _typeshed import ReadOnlyBuffer, SupportsRead from typing import IO, Any, NamedTuple, overload from typing_extensions import TypeAlias, final if sys.platform != "win32": + # Handled by PyCurses_ConvertToChtype in _cursesmodule.c. _ChType: TypeAlias = str | bytes | int # ACS codes are only initialized after initscr is called @@ -273,7 +274,7 @@ if sys.platform != "win32": def baudrate() -> int: ... def beep() -> None: ... def can_change_color() -> bool: ... - def cbreak(__flag: bool = ...) -> None: ... + def cbreak(__flag: bool = True) -> None: ... def color_content(__color_number: int) -> tuple[int, int, int]: ... # Changed in Python 3.8.8 and 3.9.2 if sys.version_info >= (3, 8): @@ -286,7 +287,7 @@ if sys.platform != "win32": def def_shell_mode() -> None: ... def delay_output(__ms: int) -> None: ... def doupdate() -> None: ... - def echo(__flag: bool = ...) -> None: ... + def echo(__flag: bool = True) -> None: ... def endwin() -> None: ... def erasechar() -> bytes: ... def filter() -> None: ... @@ -322,7 +323,7 @@ if sys.platform != "win32": def napms(__ms: int) -> int: ... def newpad(__nlines: int, __ncols: int) -> _CursesWindow: ... def newwin(__nlines: int, __ncols: int, __begin_y: int = ..., __begin_x: int = ...) -> _CursesWindow: ... - def nl(__flag: bool = ...) -> None: ... + def nl(__flag: bool = True) -> None: ... def nocbreak() -> None: ... def noecho() -> None: ... def nonl() -> None: ... @@ -330,9 +331,9 @@ if sys.platform != "win32": def noraw() -> None: ... def pair_content(__pair_number: int) -> tuple[int, int]: ... def pair_number(__attr: int) -> int: ... - def putp(__string: bytes) -> None: ... - def qiflush(__flag: bool = ...) -> None: ... - def raw(__flag: bool = ...) -> None: ... + def putp(__string: ReadOnlyBuffer) -> None: ... + def qiflush(__flag: bool = True) -> None: ... + def raw(__flag: bool = True) -> None: ... def reset_prog_mode() -> None: ... def reset_shell_mode() -> None: ... def resetty() -> None: ... @@ -344,7 +345,7 @@ if sys.platform != "win32": def set_tabsize(__size: int) -> None: ... def setsyx(__y: int, __x: int) -> None: ... - def setupterm(term: str | None = ..., fd: int = ...) -> None: ... + def setupterm(term: str | None = None, fd: int = -1) -> None: ... def start_color() -> None: ... def termattrs() -> int: ... def termname() -> bytes: ... @@ -352,16 +353,16 @@ if sys.platform != "win32": def tigetnum(__capname: str) -> int: ... def tigetstr(__capname: str) -> bytes | None: ... def tparm( - __str: bytes, - __i1: int = ..., - __i2: int = ..., - __i3: int = ..., - __i4: int = ..., - __i5: int = ..., - __i6: int = ..., - __i7: int = ..., - __i8: int = ..., - __i9: int = ..., + __str: ReadOnlyBuffer, + __i1: int = 0, + __i2: int = 0, + __i3: int = 0, + __i4: int = 0, + __i5: int = 0, + __i6: int = 0, + __i7: int = 0, + __i8: int = 0, + __i9: int = 0, ) -> bytes: ... def typeahead(__fd: int) -> None: ... def unctrl(__ch: _ChType) -> bytes: ... 
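
Editor's note: a pattern that repeats throughout the typeshed sync above (_bisect, _bootlocale, _codecs, _curses, and most stubs that follow) is replacing the old `= ...` placeholder defaults with the actual default values. The stub lines in the comments below are paraphrased for illustration, not copied from typeshed; runtime behaviour is unchanged, the new stubs simply document the real defaults so editors and checkers can surface them.

# Old stub style:  def bisect_left(a, x, lo: int = ..., hi: int | None = ...) -> int: ...
# New stub style:  def bisect_left(a, x, lo: int = 0, hi: int | None = None) -> int: ...
import bisect

data = [1, 3, 5, 7]
print(bisect.bisect_left(data, 4))            # 2, relying on the documented defaults
print(bisect.bisect_left(data, 4, 0, None))   # 2, spelling the defaults out explicitly
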
diff --git a/mypy/typeshed/stdlib/_decimal.pyi b/mypy/typeshed/stdlib/_decimal.pyi index 50c0f23734cd..b8208fe180a1 100644 --- a/mypy/typeshed/stdlib/_decimal.pyi +++ b/mypy/typeshed/stdlib/_decimal.pyi @@ -1,22 +1,21 @@ import numbers import sys -from _typeshed import Self from collections.abc import Container, Sequence from types import TracebackType -from typing import Any, ClassVar, NamedTuple, Union, overload -from typing_extensions import TypeAlias +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Final, Literal, Self, TypeAlias _Decimal: TypeAlias = Decimal | int -_DecimalNew: TypeAlias = Union[Decimal, float, str, tuple[int, Sequence[int], int]] +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] _ComparableNum: TypeAlias = Decimal | float | numbers.Rational -__version__: str -__libmpdec_version__: str +__version__: Final[str] +__libmpdec_version__: Final[str] class DecimalTuple(NamedTuple): sign: int digits: tuple[int, ...] - exponent: int + exponent: int | Literal["n", "N", "F"] ROUND_DOWN: str ROUND_HALF_UP: str @@ -53,7 +52,7 @@ def getcontext() -> Context: ... if sys.version_info >= (3, 11): def localcontext( - ctx: Context | None = ..., + ctx: Context | None = None, *, prec: int | None = ..., rounding: str | None = ..., @@ -66,17 +65,17 @@ if sys.version_info >= (3, 11): ) -> _ContextManager: ... else: - def localcontext(ctx: Context | None = ...) -> _ContextManager: ... + def localcontext(ctx: Context | None = None) -> _ContextManager: ... class Decimal: - def __new__(cls: type[Self], value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... + def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... @classmethod - def from_float(cls: type[Self], __f: float) -> Self: ... + def from_float(cls, __f: float) -> Self: ... def __bool__(self) -> bool: ... - def compare(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def as_tuple(self) -> DecimalTuple: ... def as_integer_ratio(self) -> tuple[int, int]: ... - def to_eng_string(self, context: Context | None = ...) -> str: ... + def to_eng_string(self, context: Context | None = None) -> str: ... def __abs__(self) -> Decimal: ... def __add__(self, __other: _Decimal) -> Decimal: ... def __divmod__(self, __other: _Decimal) -> tuple[Decimal, Decimal]: ... @@ -100,7 +99,7 @@ class Decimal: def __rtruediv__(self, __other: _Decimal) -> Decimal: ... def __sub__(self, __other: _Decimal) -> Decimal: ... def __truediv__(self, __other: _Decimal) -> Decimal: ... - def remainder_near(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __trunc__(self) -> int: ... @@ -116,56 +115,56 @@ class Decimal: def __round__(self, __ndigits: int) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... - def fma(self, other: _Decimal, third: _Decimal, context: Context | None = ...) -> Decimal: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... def __rpow__(self, __other: _Decimal, __context: Context | None = ...) -> Decimal: ... - def normalize(self, context: Context | None = ...) -> Decimal: ... - def quantize(self, exp: _Decimal, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... 
- def same_quantum(self, other: _Decimal, context: Context | None = ...) -> bool: ... - def to_integral_exact(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def to_integral_value(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def to_integral(self, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... - def sqrt(self, context: Context | None = ...) -> Decimal: ... - def max(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def min(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... + def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def adjusted(self) -> int: ... def canonical(self) -> Decimal: ... - def compare_signal(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def compare_total(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def compare_total_mag(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... def copy_abs(self) -> Decimal: ... def copy_negate(self) -> Decimal: ... - def copy_sign(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def exp(self, context: Context | None = ...) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... def is_canonical(self) -> bool: ... def is_finite(self) -> bool: ... def is_infinite(self) -> bool: ... def is_nan(self) -> bool: ... - def is_normal(self, context: Context | None = ...) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... def is_qnan(self) -> bool: ... def is_signed(self) -> bool: ... def is_snan(self) -> bool: ... - def is_subnormal(self, context: Context | None = ...) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... def is_zero(self) -> bool: ... - def ln(self, context: Context | None = ...) -> Decimal: ... - def log10(self, context: Context | None = ...) -> Decimal: ... - def logb(self, context: Context | None = ...) -> Decimal: ... - def logical_and(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def logical_invert(self, context: Context | None = ...) -> Decimal: ... - def logical_or(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def logical_xor(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def max_mag(self, other: _Decimal, context: Context | None = ...) 
-> Decimal: ... - def min_mag(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def next_minus(self, context: Context | None = ...) -> Decimal: ... - def next_plus(self, context: Context | None = ...) -> Decimal: ... - def next_toward(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def number_class(self, context: Context | None = ...) -> str: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... + def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... def radix(self) -> Decimal: ... - def rotate(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def scaleb(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def shift(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[str]]: ... - def __copy__(self: Self) -> Self: ... - def __deepcopy__(self: Self, __memo: Any) -> Self: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, __memo: Any) -> Self: ... def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ... class _ContextManager: @@ -203,7 +202,7 @@ class Context: traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., _ignored_flags: list[_TrapType] | None = ..., ) -> None: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[Any, ...]]: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... def clear_flags(self) -> None: ... def clear_traps(self) -> None: ... def copy(self) -> Context: ... @@ -212,7 +211,7 @@ class Context: __hash__: ClassVar[None] # type: ignore[assignment] def Etiny(self) -> int: ... def Etop(self) -> int: ... - def create_decimal(self, __num: _DecimalNew = ...) -> Decimal: ... + def create_decimal(self, __num: _DecimalNew = "0") -> Decimal: ... def create_decimal_from_float(self, __f: float) -> Decimal: ... def abs(self, __x: _Decimal) -> Decimal: ... def add(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... @@ -259,7 +258,7 @@ class Context: def normalize(self, __x: _Decimal) -> Decimal: ... def number_class(self, __x: _Decimal) -> str: ... def plus(self, __x: _Decimal) -> Decimal: ... - def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = ...) -> Decimal: ... 
+ def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... def quantize(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... def radix(self) -> Decimal: ... def remainder(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... diff --git a/mypy/typeshed/stdlib/_dummy_thread.pyi b/mypy/typeshed/stdlib/_dummy_thread.pyi index ff16b1d3dcf4..e371dd0e9933 100644 --- a/mypy/typeshed/stdlib/_dummy_thread.pyi +++ b/mypy/typeshed/stdlib/_dummy_thread.pyi @@ -11,12 +11,12 @@ def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwa def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... -def stack_size(size: int | None = ...) -> int: ... +def stack_size(size: int | None = None) -> int: ... class LockType: locked_status: bool - def acquire(self, waitflag: bool | None = ..., timeout: int = ...) -> bool: ... - def __enter__(self, waitflag: bool | None = ..., timeout: int = ...) -> bool: ... + def acquire(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... + def __enter__(self, waitflag: bool | None = None, timeout: int = -1) -> bool: ... def __exit__(self, typ: type[BaseException] | None, val: BaseException | None, tb: TracebackType | None) -> None: ... def release(self) -> bool: ... def locked(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/_dummy_threading.pyi b/mypy/typeshed/stdlib/_dummy_threading.pyi index c956946c8363..9a49dfa9649e 100644 --- a/mypy/typeshed/stdlib/_dummy_threading.pyi +++ b/mypy/typeshed/stdlib/_dummy_threading.pyi @@ -41,7 +41,7 @@ def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... def settrace(func: TraceFunction) -> None: ... def setprofile(func: ProfileFunction | None) -> None: ... -def stack_size(size: int = ...) -> int: ... +def stack_size(size: int | None = None) -> int: ... TIMEOUT_MAX: float @@ -59,17 +59,17 @@ class Thread: def ident(self) -> int | None: ... def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., + kwargs: Mapping[str, Any] | None = None, *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def start(self) -> None: ... def run(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... + def join(self, timeout: float | None = None) -> None: ... def getName(self) -> str: ... def setName(self, name: str) -> None: ... if sys.version_info >= (3, 8): @@ -86,7 +86,6 @@ class Thread: class _DummyThread(Thread): ... class Lock: - def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -96,37 +95,36 @@ class Lock: def locked(self) -> bool: ... class _RLock: - def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... RLock = _RLock class Condition: - def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... def __enter__(self) -> bool: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = ...) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... class Semaphore: - def __init__(self, value: int = ...) -> None: ... + def __init__(self, value: int = 1) -> None: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... - def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... - def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 9): def release(self, n: int = ...) -> None: ... else: @@ -135,11 +133,10 @@ class Semaphore: class BoundedSemaphore(Semaphore): ... class Event: - def __init__(self) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 8): from _thread import _excepthook, _ExceptHookArgs @@ -152,8 +149,8 @@ class Timer(Thread): self, interval: float, function: Callable[..., object], - args: Iterable[Any] | None = ..., - kwargs: Mapping[str, Any] | None = ..., + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, ) -> None: ... def cancel(self) -> None: ... @@ -164,8 +161,8 @@ class Barrier: def n_waiting(self) -> int: ... @property def broken(self) -> bool: ... - def __init__(self, parties: int, action: Callable[[], None] | None = ..., timeout: float | None = ...) -> None: ... - def wait(self, timeout: float | None = ...) -> int: ... + def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ... + def wait(self, timeout: float | None = None) -> int: ... def reset(self) -> None: ... def abort(self) -> None: ... diff --git a/mypy/typeshed/stdlib/_heapq.pyi b/mypy/typeshed/stdlib/_heapq.pyi index 90dc28deb71f..8d6c3e88103e 100644 --- a/mypy/typeshed/stdlib/_heapq.pyi +++ b/mypy/typeshed/stdlib/_heapq.pyi @@ -1,8 +1,9 @@ from typing import Any, TypeVar +from typing_extensions import Final _T = TypeVar("_T") -__about__: str +__about__: Final[str] def heapify(__heap: list[Any]) -> None: ... def heappop(__heap: list[_T]) -> _T: ... diff --git a/mypy/typeshed/stdlib/_imp.pyi b/mypy/typeshed/stdlib/_imp.pyi index 2b54a0f6fb42..adab2e803efe 100644 --- a/mypy/typeshed/stdlib/_imp.pyi +++ b/mypy/typeshed/stdlib/_imp.pyi @@ -8,7 +8,7 @@ check_hash_based_pycs: str def source_hash(key: int, source: ReadableBuffer) -> bytes: ... def create_builtin(__spec: ModuleSpec) -> types.ModuleType: ... -def create_dynamic(__spec: ModuleSpec, __file: Any = ...) -> types.ModuleType: ... 
+def create_dynamic(__spec: ModuleSpec, __file: Any = None) -> types.ModuleType: ... def acquire_lock() -> None: ... def exec_builtin(__mod: types.ModuleType) -> int: ... def exec_dynamic(__mod: types.ModuleType) -> int: ... @@ -21,8 +21,8 @@ def lock_held() -> bool: ... def release_lock() -> None: ... if sys.version_info >= (3, 11): - def find_frozen(__name: str, *, withdata: bool = ...) -> tuple[memoryview | None, bool, str | None] | None: ... - def get_frozen_object(__name: str, __data: ReadableBuffer | None = ...) -> types.CodeType: ... + def find_frozen(__name: str, *, withdata: bool = False) -> tuple[memoryview | None, bool, str | None] | None: ... + def get_frozen_object(__name: str, __data: ReadableBuffer | None = None) -> types.CodeType: ... else: def get_frozen_object(__name: str) -> types.CodeType: ... diff --git a/mypy/typeshed/stdlib/_markupbase.pyi b/mypy/typeshed/stdlib/_markupbase.pyi index 7d2a39a7aaea..62bad25e5ccc 100644 --- a/mypy/typeshed/stdlib/_markupbase.pyi +++ b/mypy/typeshed/stdlib/_markupbase.pyi @@ -5,9 +5,9 @@ class ParserBase: def reset(self) -> None: ... def getpos(self) -> tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... - def parse_comment(self, i: int, report: int = ...) -> int: ... # undocumented + def parse_comment(self, i: int, report: int = 1) -> int: ... # undocumented def parse_declaration(self, i: int) -> int: ... # undocumented - def parse_marked_section(self, i: int, report: int = ...) -> int: ... # undocumented + def parse_marked_section(self, i: int, report: int = 1) -> int: ... # undocumented def updatepos(self, i: int, j: int) -> int: ... # undocumented if sys.version_info < (3, 10): # Removed from ParserBase: https://bugs.python.org/issue31844 diff --git a/mypy/typeshed/stdlib/_msi.pyi b/mypy/typeshed/stdlib/_msi.pyi index 9dda8a598549..2fdbdfd0e9f4 100644 --- a/mypy/typeshed/stdlib/_msi.pyi +++ b/mypy/typeshed/stdlib/_msi.pyi @@ -1,7 +1,6 @@ import sys if sys.platform == "win32": - # Actual typename View, not exposed by the implementation class _View: def Execute(self, params: _Record | None = ...) -> None: ... @@ -12,24 +11,27 @@ if sys.platform == "win32": # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - # Actual typename Summary, not exposed by the implementation - class _Summary: - def GetProperty(self, propid: int) -> str | bytes | None: ... + + # Actual typename SummaryInformation, not exposed by the implementation + class _SummaryInformation: + def GetProperty(self, field: int) -> int | bytes | None: ... def GetPropertyCount(self) -> int: ... - def SetProperty(self, propid: int, value: str | bytes) -> None: ... + def SetProperty(self, field: int, value: int | str) -> None: ... def Persist(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] + # Actual typename Database, not exposed by the implementation class _Database: def OpenView(self, sql: str) -> _View: ... def Commit(self) -> None: ... - def GetSummaryInformation(self, updateCount: int) -> _Summary: ... + def GetSummaryInformation(self, updateCount: int) -> _SummaryInformation: ... def Close(self) -> None: ... # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] + # Actual typename Record, not exposed by the implementation class _Record: def GetFieldCount(self) -> int: ... 
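
Editor's note: another recurring tweak in the sync is marking module-level constants such as _csv.__version__ and _heapq.__about__ above as Final[str] rather than plain str. A short sketch of the effect; the constant and its value here are made up for illustration.

from typing import Final

__about__: Final[str] = "Heap queues"

# At runtime a later reassignment would be an ordinary statement, but mypy
# flags it with an error along the lines of: Cannot assign to final name "__about__"
# __about__ = "something else"

print(__about__)
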
diff --git a/mypy/typeshed/stdlib/_operator.pyi b/mypy/typeshed/stdlib/_operator.pyi index 92e04d0f499d..e7d1a98c4027 100644 --- a/mypy/typeshed/stdlib/_operator.pyi +++ b/mypy/typeshed/stdlib/_operator.pyi @@ -1,5 +1,6 @@ import sys -from collections.abc import Callable, Container, Iterable, Mapping, MutableMapping, MutableSequence, Sequence +from _typeshed import SupportsGetItem +from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, TypeVar, overload from typing_extensions import ParamSpec, SupportsIndex, TypeAlias, final @@ -77,11 +78,9 @@ def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... @overload def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... @overload -def getitem(__a: Sequence[_T], __b: SupportsIndex) -> _T: ... -@overload def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... @overload -def getitem(__a: Mapping[_K, _V], __b: _K) -> _V: ... +def getitem(__a: SupportsGetItem[_K, _V], __b: _K) -> _V: ... def indexOf(__a: Iterable[_T], __b: _T) -> int: ... @overload def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... @@ -89,7 +88,7 @@ def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... def setitem(__a: MutableSequence[_T], __b: slice, __c: Sequence[_T]) -> None: ... @overload def setitem(__a: MutableMapping[_K, _V], __b: _K, __c: _V) -> None: ... -def length_hint(__obj: object, __default: int = ...) -> int: ... +def length_hint(__obj: object, __default: int = 0) -> int: ... @final class attrgetter(Generic[_T_co]): @overload @@ -106,17 +105,30 @@ class attrgetter(Generic[_T_co]): @final class itemgetter(Generic[_T_co]): + # mypy lacks support for PEP 646 https://github.com/python/mypy/issues/12280 + # So we have to define all of these overloads to simulate unpacking the arguments @overload - def __new__(cls, item: Any) -> itemgetter[Any]: ... + def __new__(cls, item: _T_co) -> itemgetter[_T_co]: ... @overload - def __new__(cls, item: Any, __item2: Any) -> itemgetter[tuple[Any, Any]]: ... + def __new__(cls, item: _T_co, __item2: _T_co) -> itemgetter[tuple[_T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any) -> itemgetter[tuple[Any, Any, Any]]: ... + def __new__(cls, item: _T_co, __item2: _T_co, __item3: _T_co) -> itemgetter[tuple[_T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any, __item4: Any) -> itemgetter[tuple[Any, Any, Any, Any]]: ... + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co + ) -> itemgetter[tuple[_T_co, _T_co, _T_co, _T_co]]: ... @overload - def __new__(cls, item: Any, *items: Any) -> itemgetter[tuple[Any, ...]]: ... - def __call__(self, obj: Any) -> _T_co: ... + def __new__( + cls, item: _T_co, __item2: _T_co, __item3: _T_co, __item4: _T_co, *items: _T_co + ) -> itemgetter[tuple[_T_co, ...]]: ... + # __key: _KT_contra in SupportsGetItem seems to be causing variance issues, ie: + # TypeVar "_KT_contra@SupportsGetItem" is contravariant + # "tuple[int, int]" is incompatible with protocol "SupportsIndex" + # preventing [_T_co, ...] instead of [Any, ...] + # + # A suspected mypy issue prevents using [..., _T] instead of [..., Any] here. + # https://github.com/python/mypy/issues/14032 + def __call__(self, obj: SupportsGetItem[Any, Any]) -> Any: ... 
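
Editor's note: the itemgetter overloads above carry a comment explaining that, without PEP 646 support in mypy, __call__ has to be typed loosely as taking SupportsGetItem[Any, Any] and returning Any. For readers less familiar with the class, here is a quick reminder of the standard-library behaviour those overloads describe (plain usage, not typeshed code).

from operator import itemgetter

row = {"name": "mypy", "lang": "Python", "stars": 15000}

get_name = itemgetter("name")          # one key -> the item itself
get_pair = itemgetter("name", "lang")  # several keys -> a tuple of items

print(get_name(row))   # mypy
print(get_pair(row))   # ('mypy', 'Python')

# Any object supporting __getitem__ works, which is why the stub only requires
# SupportsGetItem; a common use is sorting by a column:
rows = [("a", 3), ("b", 1), ("c", 2)]
print(sorted(rows, key=itemgetter(1)))  # [('b', 1), ('c', 2), ('a', 3)]
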
@final class methodcaller: diff --git a/mypy/typeshed/stdlib/_osx_support.pyi b/mypy/typeshed/stdlib/_osx_support.pyi index 7fd0ee922ca6..3eb6f4ddc67c 100644 --- a/mypy/typeshed/stdlib/_osx_support.pyi +++ b/mypy/typeshed/stdlib/_osx_support.pyi @@ -12,10 +12,10 @@ _UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented _COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented _INITPRE: str # undocumented -def _find_executable(executable: str, path: str | None = ...) -> str | None: ... # undocumented +def _find_executable(executable: str, path: str | None = None) -> str | None: ... # undocumented if sys.version_info >= (3, 8): - def _read_output(commandstring: str, capture_stderr: bool = ...) -> str | None: ... # undocumented + def _read_output(commandstring: str, capture_stderr: bool = False) -> str | None: ... # undocumented else: def _read_output(commandstring: str) -> str | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/_posixsubprocess.pyi b/mypy/typeshed/stdlib/_posixsubprocess.pyi index 2d221c4896f6..ca95336bb503 100644 --- a/mypy/typeshed/stdlib/_posixsubprocess.pyi +++ b/mypy/typeshed/stdlib/_posixsubprocess.pyi @@ -1,24 +1,32 @@ import sys +from _typeshed import StrOrBytesPath from collections.abc import Callable, Sequence +from typing_extensions import SupportsIndex if sys.platform != "win32": def cloexec_pipe() -> tuple[int, int]: ... def fork_exec( - args: Sequence[str], - executable_list: Sequence[bytes], - close_fds: bool, - fds_to_keep: Sequence[int], - cwd: str, - env_list: Sequence[bytes], - p2cread: int, - p2cwrite: int, - c2pred: int, - c2pwrite: int, - errread: int, - errwrite: int, - errpipe_read: int, - errpipe_write: int, - restore_signals: int, - start_new_session: int, - preexec_fn: Callable[[], None], + __process_args: Sequence[StrOrBytesPath] | None, + __executable_list: Sequence[bytes], + __close_fds: bool, + __fds_to_keep: tuple[int, ...], + __cwd_obj: str, + __env_list: Sequence[bytes] | None, + __p2cread: int, + __p2cwrite: int, + __c2pred: int, + __c2pwrite: int, + __errread: int, + __errwrite: int, + __errpipe_read: int, + __errpipe_write: int, + __restore_signals: int, + __call_setsid: int, + __pgid_to_set: int, + __gid_object: SupportsIndex | None, + __groups_list: list[int] | None, + __uid_object: SupportsIndex | None, + __child_umask: int, + __preexec_fn: Callable[[], None], + __allow_vfork: bool, ) -> int: ... diff --git a/mypy/typeshed/stdlib/_py_abc.pyi b/mypy/typeshed/stdlib/_py_abc.pyi index ddf04364a238..cc45c6ad3814 100644 --- a/mypy/typeshed/stdlib/_py_abc.pyi +++ b/mypy/typeshed/stdlib/_py_abc.pyi @@ -1,4 +1,4 @@ -from _typeshed import Self +import _typeshed from typing import Any, NewType, TypeVar _T = TypeVar("_T") @@ -8,5 +8,7 @@ _CacheToken = NewType("_CacheToken", int) def get_cache_token() -> _CacheToken: ... class ABCMeta(type): - def __new__(__mcls: type[Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any]) -> Self: ... + def __new__( + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any] + ) -> _typeshed.Self: ... def register(cls, subclass: type[_T]) -> type[_T]: ... diff --git a/mypy/typeshed/stdlib/_random.pyi b/mypy/typeshed/stdlib/_random.pyi index c4b235f0cd5b..7c5803ede781 100644 --- a/mypy/typeshed/stdlib/_random.pyi +++ b/mypy/typeshed/stdlib/_random.pyi @@ -5,7 +5,7 @@ _State: TypeAlias = tuple[int, ...] class Random: def __init__(self, seed: object = ...) -> None: ... - def seed(self, __n: object = ...) -> None: ... 
+ def seed(self, __n: object = None) -> None: ... def getstate(self) -> _State: ... def setstate(self, __state: _State) -> None: ... def random(self) -> float: ... diff --git a/mypy/typeshed/stdlib/_sitebuiltins.pyi b/mypy/typeshed/stdlib/_sitebuiltins.pyi index 4a35921e1ef7..3bda2d88425d 100644 --- a/mypy/typeshed/stdlib/_sitebuiltins.pyi +++ b/mypy/typeshed/stdlib/_sitebuiltins.pyi @@ -6,7 +6,7 @@ class Quitter: name: str eof: str def __init__(self, name: str, eof: str) -> None: ... - def __call__(self, code: int | None = ...) -> NoReturn: ... + def __call__(self, code: int | None = None) -> NoReturn: ... class _Printer: MAXLINES: ClassVar[Literal[23]] diff --git a/mypy/typeshed/stdlib/_socket.pyi b/mypy/typeshed/stdlib/_socket.pyi index 09dbaae3dc64..f7b0e6901bf4 100644 --- a/mypy/typeshed/stdlib/_socket.pyi +++ b/mypy/typeshed/stdlib/_socket.pyi @@ -15,10 +15,10 @@ _CMSG: TypeAlias = tuple[int, int, bytes] _CMSGArg: TypeAlias = tuple[int, int, ReadableBuffer] # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, -# AF_NETLINK, AF_TIPC) or strings (AF_UNIX). -_Address: TypeAlias = tuple[Any, ...] | str +# AF_NETLINK, AF_TIPC) or strings/buffers (AF_UNIX). +# See getsockaddrarg() in socketmodule.c. +_Address: TypeAlias = tuple[Any, ...] | str | ReadableBuffer _RetAddress: TypeAlias = Any -# TODO Most methods allow bytes as address objects # ----- Constants ----- # Some socket families are listed in the "Socket families" section of the docs, @@ -583,11 +583,15 @@ class socket: def proto(self) -> int: ... @property def timeout(self) -> float | None: ... - def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... - def bind(self, __address: _Address | bytes) -> None: ... + if sys.platform == "win32": + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | bytes | None = ...) -> None: ... + else: + def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... + + def bind(self, __address: _Address) -> None: ... def close(self) -> None: ... - def connect(self, __address: _Address | bytes) -> None: ... - def connect_ex(self, __address: _Address | bytes) -> int: ... + def connect(self, __address: _Address) -> None: ... + def connect_ex(self, __address: _Address) -> int: ... def detach(self) -> int: ... def fileno(self) -> int: ... def getpeername(self) -> _RetAddress: ... @@ -624,7 +628,7 @@ class socket: __buffers: Iterable[ReadableBuffer], __ancdata: Iterable[_CMSGArg] = ..., __flags: int = ..., - __address: _Address = ..., + __address: _Address | None = ..., ) -> int: ... if sys.platform == "linux": def sendmsg_afalg( @@ -634,7 +638,7 @@ class socket: def setblocking(self, __flag: bool) -> None: ... def settimeout(self, __value: float | None) -> None: ... @overload - def setsockopt(self, __level: int, __optname: int, __value: int | bytes) -> None: ... + def setsockopt(self, __level: int, __optname: int, __value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, __level: int, __optname: int, __value: None, __optlen: int) -> None: ... if sys.platform == "win32": @@ -671,9 +675,9 @@ def ntohs(__x: int) -> int: ... # param & ret val are 16-bit ints def htonl(__x: int) -> int: ... # param & ret val are 32-bit ints def htons(__x: int) -> int: ... # param & ret val are 16-bit ints def inet_aton(__ip_string: str) -> bytes: ... # ret val 4 bytes in length -def inet_ntoa(__packed_ip: bytes) -> str: ... 
+def inet_ntoa(__packed_ip: ReadableBuffer) -> str: ... def inet_pton(__address_family: int, __ip_string: str) -> bytes: ... -def inet_ntop(__address_family: int, __packed_ip: bytes) -> str: ... +def inet_ntop(__address_family: int, __packed_ip: ReadableBuffer) -> str: ... def getdefaulttimeout() -> float | None: ... def setdefaulttimeout(__timeout: float | None) -> None: ... diff --git a/mypy/typeshed/stdlib/_threading_local.pyi b/mypy/typeshed/stdlib/_threading_local.pyi index d455ce09227e..98683dabcef8 100644 --- a/mypy/typeshed/stdlib/_threading_local.pyi +++ b/mypy/typeshed/stdlib/_threading_local.pyi @@ -14,3 +14,4 @@ class _localimpl: class local: def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... + def __delattr__(self, name: str) -> None: ... diff --git a/mypy/typeshed/stdlib/_tkinter.pyi b/mypy/typeshed/stdlib/_tkinter.pyi index c2cf55505afb..271fd37df68b 100644 --- a/mypy/typeshed/stdlib/_tkinter.pyi +++ b/mypy/typeshed/stdlib/_tkinter.pyi @@ -17,8 +17,10 @@ from typing_extensions import Literal, final # (, ) @final class Tcl_Obj: - string: str | bytes - typename: str + @property + def string(self) -> str: ... + @property + def typename(self) -> str: ... __hash__: ClassVar[None] # type: ignore[assignment] def __eq__(self, __other): ... def __ge__(self, __other): ... @@ -58,7 +60,7 @@ class TkappType: def createtimerhandler(self, __milliseconds, __func): ... def deletecommand(self, __name): ... - def dooneevent(self, __flags: int = ...): ... + def dooneevent(self, __flags: int = 0): ... def eval(self, __script: str) -> str: ... def evalfile(self, __fileName): ... def exprboolean(self, __s): ... @@ -74,7 +76,7 @@ class TkappType: def globalunsetvar(self, *args, **kwargs): ... def interpaddr(self): ... def loadtk(self) -> None: ... - def mainloop(self, __threshold: int = ...): ... + def mainloop(self, __threshold: int = 0): ... def quit(self): ... def record(self, __script): ... def setvar(self, *ags, **kwargs): ... @@ -105,15 +107,29 @@ TK_VERSION: str class TkttType: def deletetimerhandler(self): ... -def create( - __screenName: str | None = ..., - __baseName: str | None = ..., - __className: str = ..., - __interactive: bool = ..., - __wantobjects: bool = ..., - __wantTk: bool = ..., - __sync: bool = ..., - __use: str | None = ..., -): ... +if sys.version_info >= (3, 8): + def create( + __screenName: str | None = None, + __baseName: str = "", + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + +else: + def create( + __screenName: str | None = None, + __baseName: str | None = None, + __className: str = "Tk", + __interactive: bool = False, + __wantobjects: bool = False, + __wantTk: bool = True, + __sync: bool = False, + __use: str | None = None, + ): ... + def getbusywaitinterval(): ... def setbusywaitinterval(__new_val): ... diff --git a/mypy/typeshed/stdlib/_tracemalloc.pyi b/mypy/typeshed/stdlib/_tracemalloc.pyi index 2262d4b16b3a..1b79d9dc5785 100644 --- a/mypy/typeshed/stdlib/_tracemalloc.pyi +++ b/mypy/typeshed/stdlib/_tracemalloc.pyi @@ -13,5 +13,5 @@ def is_tracing() -> bool: ... if sys.version_info >= (3, 9): def reset_peak() -> None: ... -def start(__nframe: int = ...) -> None: ... +def start(__nframe: int = 1) -> None: ... def stop() -> None: ... 
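The hunks above widen several `_socket` parameters from `bytes` to `ReadableBuffer` (`inet_ntoa`, `inet_ntop`, `setsockopt`, and the `bind`/`connect` address type), reflecting that CPython accepts any buffer-protocol object in those positions. A minimal sketch, not part of the patch, of what the widened annotations now allow (the values are illustrative only):

```python
import socket

# bytes still works, exactly as before:
assert socket.inet_ntoa(b"\x7f\x00\x00\x01") == "127.0.0.1"

# With ReadableBuffer, other buffer objects type-check as well, which matches
# the runtime behaviour (the C implementation uses the buffer protocol):
packed = memoryview(bytearray(b"\x7f\x00\x00\x01"))
assert socket.inet_ntoa(packed) == "127.0.0.1"
assert socket.inet_ntop(socket.AF_INET, packed) == "127.0.0.1"
```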
diff --git a/mypy/typeshed/stdlib/_typeshed/__init__.pyi b/mypy/typeshed/stdlib/_typeshed/__init__.pyi index 89ca9d81619a..d0c6b3ab1173 100644 --- a/mypy/typeshed/stdlib/_typeshed/__init__.pyi +++ b/mypy/typeshed/stdlib/_typeshed/__init__.pyi @@ -7,10 +7,11 @@ import ctypes import mmap import pickle import sys -from collections.abc import Awaitable, Callable, Container, Iterable, Set as AbstractSet +from collections.abc import Awaitable, Callable, Iterable, Set as AbstractSet +from dataclasses import Field from os import PathLike from types import FrameType, TracebackType -from typing import Any, AnyStr, Generic, Protocol, TypeVar, Union +from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar from typing_extensions import Final, Literal, LiteralString, TypeAlias, final _KT = TypeVar("_KT") @@ -36,6 +37,9 @@ AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 # "Incomplete | None" instead of "Any | None". Incomplete: TypeAlias = Any +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object + # stable class IdentityFunction(Protocol): def __call__(self, __x: _T) -> _T: ... @@ -115,16 +119,17 @@ class SupportsItems(Protocol[_KT_co, _VT_co]): # stable class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): def keys(self) -> Iterable[_KT]: ... - def __getitem__(self, __k: _KT) -> _VT_co: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... # stable -class SupportsGetItem(Container[_KT_contra], Protocol[_KT_contra, _VT_co]): - def __getitem__(self, __k: _KT_contra) -> _VT_co: ... +class SupportsGetItem(Protocol[_KT_contra, _VT_co]): + def __contains__(self, __x: Any) -> bool: ... + def __getitem__(self, __key: _KT_contra) -> _VT_co: ... # stable class SupportsItemAccess(SupportsGetItem[_KT_contra, _VT], Protocol[_KT_contra, _VT]): - def __setitem__(self, __k: _KT_contra, __v: _VT) -> None: ... - def __delitem__(self, __v: _KT_contra) -> None: ... + def __setitem__(self, __key: _KT_contra, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT_contra) -> None: ... StrPath: TypeAlias = str | PathLike[str] # stable BytesPath: TypeAlias = bytes | PathLike[bytes] # stable @@ -204,6 +209,7 @@ class HasFileno(Protocol): FileDescriptor: TypeAlias = int # stable FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath # stable class SupportsRead(Protocol[_T_co]): @@ -233,9 +239,33 @@ else: WriteableBuffer: TypeAlias = bytearray | memoryview | array.array[Any] | mmap.mmap | ctypes._CData # stable # Same as _WriteableBuffer, but also includes read-only buffer types (like bytes). ReadableBuffer: TypeAlias = ReadOnlyBuffer | WriteableBuffer # stable +_BufferWithLen: TypeAlias = ReadableBuffer # not stable # noqa: Y047 + +# Anything that implements the read-write buffer interface, and can be sliced/indexed. +SliceableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +IndexableBuffer: TypeAlias = bytes | bytearray | memoryview | array.array[Any] | mmap.mmap +# https://github.com/python/typeshed/pull/9115#issuecomment-1304905864 +# Post PEP 688, they should be rewritten as such: +# from collections.abc import Sequence +# from typing import Sized, overload +# class SliceableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# class IndexableBuffer(Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... 
+# def __getitem__(self, __i: int) -> int: ... +# class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): +# def __buffer__(self, __flags: int) -> memoryview: ... +# def __contains__(self, __x: Any) -> bool: ... +# @overload +# def __getitem__(self, __slice: slice) -> Sequence[int]: ... +# @overload +# def __getitem__(self, __i: int) -> int: ... +# class SizedBuffer(Sized, Protocol): # instead of _BufferWithLen +# def __buffer__(self, __flags: int) -> memoryview: ... ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] -OptExcInfo: TypeAlias = Union[ExcInfo, tuple[None, None, None]] +OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None] # stable if sys.version_info >= (3, 10): @@ -274,5 +304,11 @@ StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] # Objects suitable to be passed to sys.settrace, threading.settrace, and similar -# TODO: Ideally this would be a recursive type alias -TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Callable[[FrameType, str, Any], Any] | None] +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] + +# experimental +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] diff --git a/mypy/typeshed/stdlib/_warnings.pyi b/mypy/typeshed/stdlib/_warnings.pyi index 2eb9ae478a5d..0981dfeaafee 100644 --- a/mypy/typeshed/stdlib/_warnings.pyi +++ b/mypy/typeshed/stdlib/_warnings.pyi @@ -5,9 +5,9 @@ _onceregistry: dict[Any, Any] filters: list[tuple[str, str | None, type[Warning], str | None, int]] @overload -def warn(message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... +def warn(message: str, category: type[Warning] | None = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload -def warn(message: Warning, category: Any = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... +def warn(message: Warning, category: Any = None, stacklevel: int = 1, source: Any | None = None) -> None: ... @overload def warn_explicit( message: str, diff --git a/mypy/typeshed/stdlib/_weakref.pyi b/mypy/typeshed/stdlib/_weakref.pyi index 742bc3ad9f36..2a43de3ffd6b 100644 --- a/mypy/typeshed/stdlib/_weakref.pyi +++ b/mypy/typeshed/stdlib/_weakref.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Callable from typing import Any, Generic, TypeVar, overload -from typing_extensions import final +from typing_extensions import Self, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -21,7 +20,7 @@ class ProxyType(Generic[_T]): # "weakproxy" class ReferenceType(Generic[_T]): __callback__: Callable[[ReferenceType[_T]], Any] - def __new__(cls: type[Self], o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... + def __new__(cls, o: _T, callback: Callable[[ReferenceType[_T]], Any] | None = ...) -> Self: ... def __call__(self) -> _T | None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -33,6 +32,6 @@ def getweakrefs(__object: Any) -> list[Any]: ... # Return CallableProxyType if object is callable, ProxyType otherwise @overload -def proxy(__object: _C, __callback: Callable[[_C], Any] | None = ...) -> CallableProxyType[_C]: ... 
+def proxy(__object: _C, __callback: Callable[[_C], Any] | None = None) -> CallableProxyType[_C]: ... @overload -def proxy(__object: _T, __callback: Callable[[_T], Any] | None = ...) -> Any: ... +def proxy(__object: _T, __callback: Callable[[_T], Any] | None = None) -> Any: ... diff --git a/mypy/typeshed/stdlib/_weakrefset.pyi b/mypy/typeshed/stdlib/_weakrefset.pyi index da09442e855b..d73d79155329 100644 --- a/mypy/typeshed/stdlib/_weakrefset.pyi +++ b/mypy/typeshed/stdlib/_weakrefset.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Iterable, Iterator, MutableSet from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -13,26 +13,26 @@ _T = TypeVar("_T") class WeakSet(MutableSet[_T], Generic[_T]): @overload - def __init__(self, data: None = ...) -> None: ... + def __init__(self, data: None = None) -> None: ... @overload def __init__(self, data: Iterable[_T]) -> None: ... def add(self, item: _T) -> None: ... def discard(self, item: _T) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def remove(self, item: _T) -> None: ... def update(self, other: Iterable[_T]) -> None: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... - def __ior__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] - def difference(self: Self, other: Iterable[_T]) -> Self: ... - def __sub__(self: Self, other: Iterable[Any]) -> Self: ... + def __ior__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def difference(self, other: Iterable[_T]) -> Self: ... + def __sub__(self, other: Iterable[Any]) -> Self: ... def difference_update(self, other: Iterable[Any]) -> None: ... - def __isub__(self: Self, other: Iterable[Any]) -> Self: ... - def intersection(self: Self, other: Iterable[_T]) -> Self: ... - def __and__(self: Self, other: Iterable[Any]) -> Self: ... + def __isub__(self, other: Iterable[Any]) -> Self: ... + def intersection(self, other: Iterable[_T]) -> Self: ... + def __and__(self, other: Iterable[Any]) -> Self: ... def intersection_update(self, other: Iterable[Any]) -> None: ... - def __iand__(self: Self, other: Iterable[Any]) -> Self: ... + def __iand__(self, other: Iterable[Any]) -> Self: ... def issubset(self, other: Iterable[_T]) -> bool: ... def __le__(self, other: Iterable[_T]) -> bool: ... def __lt__(self, other: Iterable[_T]) -> bool: ... @@ -43,7 +43,7 @@ class WeakSet(MutableSet[_T], Generic[_T]): def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __xor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def symmetric_difference_update(self, other: Iterable[_T]) -> None: ... - def __ixor__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def __ixor__(self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... 
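A pattern that recurs in `_weakref`, `_weakrefset`, and many of the stubs that follow is replacing `_typeshed.Self` annotations on `self`/`cls` with the PEP 673 `Self` type imported from `typing_extensions`. A brief sketch of the two spellings for comparison; the `Box` class is made up for illustration and is not part of any stub:

```python
from typing import TypeVar

from typing_extensions import Self

_S = TypeVar("_S", bound="Box")  # rough stand-in for the old _typeshed.Self

class Box:
    # Old spelling (the removed lines): annotate `self` with a TypeVar and
    # return that same TypeVar.
    def copy_old(self: _S) -> _S:
        return type(self)()

    # New spelling: leave `self` unannotated and return Self, which always
    # means "an instance of the class the method is called on".
    def copy(self) -> Self:
        return type(self)()
```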
diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index 259293c51fd3..e21402b801c5 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -1,16 +1,19 @@ import sys +from _typeshed import ReadableBuffer from collections.abc import Sequence from typing import Any, NoReturn, overload from typing_extensions import Literal, final if sys.platform == "win32": - ABOVE_NORMAL_PRIORITY_CLASS: Literal[32768] - BELOW_NORMAL_PRIORITY_CLASS: Literal[16384] - CREATE_BREAKAWAY_FROM_JOB: Literal[16777216] - CREATE_DEFAULT_ERROR_MODE: Literal[67108864] - CREATE_NO_WINDOW: Literal[134217728] - CREATE_NEW_CONSOLE: Literal[16] - CREATE_NEW_PROCESS_GROUP: Literal[512] + ABOVE_NORMAL_PRIORITY_CLASS: Literal[0x8000] + BELOW_NORMAL_PRIORITY_CLASS: Literal[0x4000] + + CREATE_BREAKAWAY_FROM_JOB: Literal[0x1000000] + CREATE_DEFAULT_ERROR_MODE: Literal[0x4000000] + CREATE_NO_WINDOW: Literal[0x8000000] + CREATE_NEW_CONSOLE: Literal[0x10] + CREATE_NEW_PROCESS_GROUP: Literal[0x200] + DETACHED_PROCESS: Literal[8] DUPLICATE_CLOSE_SOURCE: Literal[1] DUPLICATE_SAME_ACCESS: Literal[2] @@ -27,40 +30,43 @@ if sys.platform == "win32": ERROR_PIPE_CONNECTED: Literal[535] ERROR_SEM_TIMEOUT: Literal[121] - FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[524288] - FILE_FLAG_OVERLAPPED: Literal[1073741824] + FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[0x80000] + FILE_FLAG_OVERLAPPED: Literal[0x40000000] + FILE_GENERIC_READ: Literal[1179785] FILE_GENERIC_WRITE: Literal[1179926] + if sys.version_info >= (3, 8): FILE_MAP_ALL_ACCESS: Literal[983071] FILE_MAP_COPY: Literal[1] FILE_MAP_EXECUTE: Literal[32] FILE_MAP_READ: Literal[4] FILE_MAP_WRITE: Literal[2] + FILE_TYPE_CHAR: Literal[2] FILE_TYPE_DISK: Literal[1] FILE_TYPE_PIPE: Literal[3] FILE_TYPE_REMOTE: Literal[32768] FILE_TYPE_UNKNOWN: Literal[0] - GENERIC_READ: Literal[2147483648] - GENERIC_WRITE: Literal[1073741824] - HIGH_PRIORITY_CLASS: Literal[128] - INFINITE: Literal[4294967295] + GENERIC_READ: Literal[0x80000000] + GENERIC_WRITE: Literal[0x40000000] + HIGH_PRIORITY_CLASS: Literal[0x80] + INFINITE: Literal[0xFFFFFFFF] if sys.version_info >= (3, 8): - INVALID_HANDLE_VALUE: int # very large number - IDLE_PRIORITY_CLASS: Literal[64] - NORMAL_PRIORITY_CLASS: Literal[32] - REALTIME_PRIORITY_CLASS: Literal[256] - NMPWAIT_WAIT_FOREVER: Literal[4294967295] + INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] + IDLE_PRIORITY_CLASS: Literal[0x40] + NORMAL_PRIORITY_CLASS: Literal[0x20] + REALTIME_PRIORITY_CLASS: Literal[0x100] + NMPWAIT_WAIT_FOREVER: Literal[0xFFFFFFFF] if sys.version_info >= (3, 8): - MEM_COMMIT: Literal[4096] - MEM_FREE: Literal[65536] - MEM_IMAGE: Literal[16777216] - MEM_MAPPED: Literal[262144] - MEM_PRIVATE: Literal[131072] - MEM_RESERVE: Literal[8192] + MEM_COMMIT: Literal[0x1000] + MEM_FREE: Literal[0x10000] + MEM_IMAGE: Literal[0x1000000] + MEM_MAPPED: Literal[0x40000] + MEM_PRIVATE: Literal[0x20000] + MEM_RESERVE: Literal[0x2000] NULL: Literal[0] OPEN_EXISTING: Literal[3] @@ -71,42 +77,47 @@ if sys.platform == "win32": PIPE_TYPE_MESSAGE: Literal[4] PIPE_UNLIMITED_INSTANCES: Literal[255] PIPE_WAIT: Literal[0] + if sys.version_info >= (3, 8): - PAGE_EXECUTE: Literal[16] - PAGE_EXECUTE_READ: Literal[32] - PAGE_EXECUTE_READWRITE: Literal[64] - PAGE_EXECUTE_WRITECOPY: Literal[128] - PAGE_GUARD: Literal[256] - PAGE_NOACCESS: Literal[1] - PAGE_NOCACHE: Literal[512] - PAGE_READONLY: Literal[2] - PAGE_READWRITE: Literal[4] - PAGE_WRITECOMBINE: Literal[1024] - PAGE_WRITECOPY: Literal[8] - - PROCESS_ALL_ACCESS: Literal[2097151] 
- PROCESS_DUP_HANDLE: Literal[64] + PAGE_EXECUTE: Literal[0x10] + PAGE_EXECUTE_READ: Literal[0x20] + PAGE_EXECUTE_READWRITE: Literal[0x40] + PAGE_EXECUTE_WRITECOPY: Literal[0x80] + PAGE_GUARD: Literal[0x100] + PAGE_NOACCESS: Literal[0x1] + PAGE_NOCACHE: Literal[0x200] + PAGE_READONLY: Literal[0x2] + PAGE_READWRITE: Literal[0x4] + PAGE_WRITECOMBINE: Literal[0x400] + PAGE_WRITECOPY: Literal[0x8] + + PROCESS_ALL_ACCESS: Literal[0x1FFFFF] + PROCESS_DUP_HANDLE: Literal[0x40] + if sys.version_info >= (3, 8): - SEC_COMMIT: Literal[134217728] - SEC_IMAGE: Literal[16777216] - SEC_LARGE_PAGES: Literal[2147483648] - SEC_NOCACHE: Literal[268435456] - SEC_RESERVE: Literal[67108864] - SEC_WRITECOMBINE: Literal[1073741824] - STARTF_USESHOWWINDOW: Literal[1] - STARTF_USESTDHANDLES: Literal[256] - STD_ERROR_HANDLE: Literal[4294967284] - STD_INPUT_HANDLE: Literal[4294967286] - STD_OUTPUT_HANDLE: Literal[4294967285] + SEC_COMMIT: Literal[0x8000000] + SEC_IMAGE: Literal[0x1000000] + SEC_LARGE_PAGES: Literal[0x80000000] + SEC_NOCACHE: Literal[0x10000000] + SEC_RESERVE: Literal[0x4000000] + SEC_WRITECOMBINE: Literal[0x40000000] + + STARTF_USESHOWWINDOW: Literal[0x1] + STARTF_USESTDHANDLES: Literal[0x100] + + STD_ERROR_HANDLE: Literal[0xFFFFFFF4] + STD_OUTPUT_HANDLE: Literal[0xFFFFFFF5] + STD_INPUT_HANDLE: Literal[0xFFFFFFF6] + STILL_ACTIVE: Literal[259] SW_HIDE: Literal[0] if sys.version_info >= (3, 8): - SYNCHRONIZE: Literal[1048576] + SYNCHRONIZE: Literal[0x100000] WAIT_ABANDONED_0: Literal[128] WAIT_OBJECT_0: Literal[0] WAIT_TIMEOUT: Literal[258] - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 10): LOCALE_NAME_INVARIANT: str LOCALE_NAME_MAX_LENGTH: int LOCALE_NAME_SYSTEM_DEFAULT: str @@ -127,7 +138,7 @@ if sys.platform == "win32": @overload def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... @overload - def ConnectNamedPipe(handle: int, overlapped: Literal[False] = ...) -> None: ... + def ConnectNamedPipe(handle: int, overlapped: Literal[False] = False) -> None: ... @overload def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... def CreateFile( @@ -168,7 +179,7 @@ if sys.platform == "win32": __target_process_handle: int, __desired_access: int, __inherit_handle: bool, - __options: int = ..., + __options: int = 0, ) -> int: ... def ExitProcess(__ExitCode: int) -> NoReturn: ... def GetACP() -> int: ... @@ -180,29 +191,30 @@ if sys.platform == "win32": def GetStdHandle(__std_handle: int) -> int: ... def GetVersion() -> int: ... def OpenProcess(__desired_access: int, __inherit_handle: bool, __process_id: int) -> int: ... - def PeekNamedPipe(__handle: int, __size: int = ...) -> tuple[int, int] | tuple[bytes, int, int]: ... - if sys.version_info >= (3, 11): + def PeekNamedPipe(__handle: int, __size: int = 0) -> tuple[int, int] | tuple[bytes, int, int]: ... + if sys.version_info >= (3, 10): def LCMapStringEx(locale: str, flags: int, src: str) -> str: ... + def UnmapViewOfFile(__address: int) -> None: ... @overload def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def ReadFile(handle: int, size: int, overlapped: Literal[False] = ...) -> tuple[bytes, int]: ... + def ReadFile(handle: int, size: int, overlapped: Literal[False] = False) -> tuple[bytes, int]: ... @overload def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ... 
def SetNamedPipeHandleState( __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None ) -> None: ... def TerminateProcess(__handle: int, __exit_code: int) -> None: ... - def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = ...) -> int: ... + def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = 0xFFFFFFFF) -> int: ... def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... def WaitNamedPipe(__name: str, __timeout: int) -> None: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: Literal[False] = ...) -> tuple[int, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: Literal[False] = False) -> tuple[int, int]: ... @overload - def WriteFile(handle: int, buffer: bytes, overlapped: int | bool) -> tuple[Any, int]: ... + def WriteFile(handle: int, buffer: ReadableBuffer, overlapped: int | bool) -> tuple[Any, int]: ... @final class Overlapped: event: int diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index f7f82333a362..068dab4752be 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -1,5 +1,6 @@ +import _typeshed import sys -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Callable from typing import Any, Generic, TypeVar from typing_extensions import Literal @@ -13,17 +14,16 @@ class ABCMeta(type): __abstractmethods__: frozenset[str] if sys.version_info >= (3, 11): def __new__( - __mcls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any - ) -> Self: ... + __mcls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... else: - # pyright doesn't like the first parameter being called mcls, hence the `pyright: ignore` def __new__( - mcls: type[Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any # pyright: ignore - ) -> Self: ... + mcls: type[_typeshed.Self], name: str, bases: tuple[type, ...], namespace: dict[str, Any], **kwargs: Any + ) -> _typeshed.Self: ... - def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... - def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... - def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = ...) -> None: ... + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... def abstractmethod(funcobj: _FuncT) -> _FuncT: ... 
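Most of the `_winapi` hunk above only rewrites the `Literal` constants from decimal to hexadecimal (and regroups them); apart from a handful of signature and version-guard tweaks, the values are unchanged. A few spot checks, runnable as plain Python, for anyone comparing the before/after columns:

```python
# Hex spellings introduced above equal the decimal literals they replace.
assert 0x8000 == 32768            # ABOVE_NORMAL_PRIORITY_CLASS
assert 0x4000000 == 67108864      # CREATE_DEFAULT_ERROR_MODE
assert 0xFFFFFFFF == 4294967295   # INFINITE / NMPWAIT_WAIT_FOREVER
assert 0x1FFFFF == 2097151        # PROCESS_ALL_ACCESS
assert 0xFFFFFFF4 == 4294967284   # STD_ERROR_HANDLE
```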
diff --git a/mypy/typeshed/stdlib/aifc.pyi b/mypy/typeshed/stdlib/aifc.pyi index 14e824f3d22e..ab0c18ed6623 100644 --- a/mypy/typeshed/stdlib/aifc.pyi +++ b/mypy/typeshed/stdlib/aifc.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from types import TracebackType from typing import IO, Any, NamedTuple, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["Error", "open"] @@ -24,7 +23,7 @@ _Marker: TypeAlias = tuple[int, int, bytes] class Aifc_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -48,7 +47,7 @@ class Aifc_read: class Aifc_write: def __init__(self, f: _File) -> None: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -81,7 +80,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Aifc_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): @overload @@ -89,4 +88,4 @@ if sys.version_info < (3, 9): @overload def openfp(f: _File, mode: Literal["w", "wb"]) -> Aifc_write: ... @overload - def openfp(f: _File, mode: str | None = ...) -> Any: ... + def openfp(f: _File, mode: str | None = None) -> Any: ... diff --git a/mypy/typeshed/stdlib/antigravity.pyi b/mypy/typeshed/stdlib/antigravity.pyi index e30917511030..3986e7d1c9f2 100644 --- a/mypy/typeshed/stdlib/antigravity.pyi +++ b/mypy/typeshed/stdlib/antigravity.pyi @@ -1 +1,3 @@ -def geohash(latitude: float, longitude: float, datedow: bytes) -> None: ... +from _typeshed import ReadableBuffer + +def geohash(latitude: float, longitude: float, datedow: ReadableBuffer) -> None: ... diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index 1b86a4e10cbb..20d9dfa9d137 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -78,7 +78,7 @@ class _ActionsContainer: _has_negative_number_optionals: list[bool] def __init__(self, description: str | None, prefix_chars: str, argument_default: Any, conflict_handler: str) -> None: ... def register(self, registry_name: str, value: Any, object: Any) -> None: ... - def _registry_get(self, registry_name: str, value: Any, default: Any = ...) -> Any: ... + def _registry_get(self, registry_name: str, value: Any, default: Any = None) -> Any: ... def set_defaults(self, **kwargs: Any) -> None: ... def get_default(self, dest: str) -> Any: ... def add_argument( @@ -104,7 +104,7 @@ class _ActionsContainer: def _add_container_actions(self, container: _ActionsContainer) -> None: ... def _get_positional_kwargs(self, dest: str, **kwargs: Any) -> dict[str, Any]: ... def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... - def _pop_action_class(self, kwargs: Any, default: type[Action] | None = ...) -> type[Action]: ... + def _pop_action_class(self, kwargs: Any, default: type[Action] | None = None) -> type[Action]: ... def _get_handler(self) -> Callable[[Action, Iterable[tuple[str, Action]]], Any]: ... 
def _check_conflict(self, action: Action) -> None: ... def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> NoReturn: ... @@ -127,43 +127,44 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): _optionals: _ArgumentGroup _subparsers: _ArgumentGroup | None + # Note: the constructor arguments are also used in _SubParsersAction.add_parser. if sys.version_info >= (3, 9): def __init__( self, - prog: str | None = ..., - usage: str | None = ..., - description: str | None = ..., - epilog: str | None = ..., + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, parents: Sequence[ArgumentParser] = ..., formatter_class: _FormatterClass = ..., - prefix_chars: str = ..., - fromfile_prefix_chars: str | None = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - add_help: bool = ..., - allow_abbrev: bool = ..., - exit_on_error: bool = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, + exit_on_error: bool = True, ) -> None: ... else: def __init__( self, - prog: str | None = ..., - usage: str | None = ..., - description: str | None = ..., - epilog: str | None = ..., + prog: str | None = None, + usage: str | None = None, + description: str | None = None, + epilog: str | None = None, parents: Sequence[ArgumentParser] = ..., formatter_class: _FormatterClass = ..., - prefix_chars: str = ..., - fromfile_prefix_chars: str | None = ..., - argument_default: Any = ..., - conflict_handler: str = ..., - add_help: bool = ..., - allow_abbrev: bool = ..., + prefix_chars: str = "-", + fromfile_prefix_chars: str | None = None, + argument_default: Any = None, + conflict_handler: str = "error", + add_help: bool = True, + allow_abbrev: bool = True, ) -> None: ... # The type-ignores in these overloads should be temporary. See: # https://github.com/python/typeshed/pull/2643#issuecomment-442280277 @overload - def parse_args(self, args: Sequence[str] | None = ...) -> Namespace: ... + def parse_args(self, args: Sequence[str] | None = None) -> Namespace: ... @overload def parse_args(self, args: Sequence[str] | None, namespace: None) -> Namespace: ... # type: ignore[misc] @overload @@ -201,19 +202,19 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): help: str | None = ..., metavar: str | None = ..., ) -> _SubParsersAction[_ArgumentParserT]: ... - def print_usage(self, file: IO[str] | None = ...) -> None: ... - def print_help(self, file: IO[str] | None = ...) -> None: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... def format_usage(self) -> str: ... def format_help(self) -> str: ... def parse_known_args( - self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... + self, args: Sequence[str] | None = None, namespace: Namespace | None = None ) -> tuple[Namespace, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... - def exit(self, status: int = ..., message: str | None = ...) -> NoReturn: ... + def exit(self, status: int = 0, message: str | None = None) -> NoReturn: ... def error(self, message: str) -> NoReturn: ... - def parse_intermixed_args(self, args: Sequence[str] | None = ..., namespace: Namespace | None = ...) -> Namespace: ... 
+ def parse_intermixed_args(self, args: Sequence[str] | None = None, namespace: Namespace | None = None) -> Namespace: ... def parse_known_intermixed_args( - self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... + self, args: Sequence[str] | None = None, namespace: Namespace | None = None ) -> tuple[Namespace, list[str]]: ... # undocumented def _get_optional_actions(self) -> list[Action]: ... @@ -229,7 +230,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def _get_value(self, action: Action, arg_string: str) -> Any: ... def _check_value(self, action: Action, value: Any) -> None: ... def _get_formatter(self) -> HelpFormatter: ... - def _print_message(self, message: str, file: IO[str] | None = ...) -> None: ... + def _print_message(self, message: str, file: IO[str] | None = None) -> None: ... class HelpFormatter: # undocumented @@ -245,7 +246,7 @@ class HelpFormatter: _whitespace_matcher: Pattern[str] _long_break_matcher: Pattern[str] _Section: type[Any] # Nested class - def __init__(self, prog: str, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ...) -> None: ... + def __init__(self, prog: str, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None) -> None: ... def _indent(self) -> None: ... def _dedent(self) -> None: ... def _add_item(self, func: Callable[..., str], args: Iterable[Any]) -> None: ... @@ -253,7 +254,7 @@ class HelpFormatter: def end_section(self) -> None: ... def add_text(self, text: str | None) -> None: ... def add_usage( - self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None = ... + self, usage: str | None, actions: Iterable[Action], groups: Iterable[_ArgumentGroup], prefix: str | None = None ) -> None: ... def add_argument(self, action: Action) -> None: ... def add_arguments(self, actions: Iterable[Action]) -> None: ... @@ -296,17 +297,17 @@ class Action(_AttributeHolder): self, option_strings: Sequence[str], dest: str, - nargs: int | str | None = ..., - const: _T | None = ..., - default: _T | str | None = ..., - type: Callable[[str], _T] | FileType | None = ..., - choices: Iterable[_T] | None = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + nargs: int | str | None = None, + const: _T | None = None, + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... def __call__( - self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = ... + self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = None ) -> None: ... if sys.version_info >= (3, 9): def format_usage(self) -> str: ... @@ -317,12 +318,12 @@ if sys.version_info >= (3, 9): self, option_strings: Sequence[str], dest: str, - default: _T | str | None = ..., - type: Callable[[str], _T] | FileType | None = ..., - choices: Iterable[_T] | None = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + default: _T | str | None = None, + type: Callable[[str], _T] | FileType | None = None, + choices: Iterable[_T] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... 
class Namespace(_AttributeHolder): @@ -338,7 +339,7 @@ class FileType: _bufsize: int _encoding: str | None _errors: str | None - def __init__(self, mode: str = ..., bufsize: int = ..., encoding: str | None = ..., errors: str | None = ...) -> None: ... + def __init__(self, mode: str = "r", bufsize: int = -1, encoding: str | None = None, errors: str | None = None) -> None: ... def __call__(self, string: str) -> IO[Any]: ... # undocumented @@ -346,14 +347,14 @@ class _ArgumentGroup(_ActionsContainer): title: str | None _group_actions: list[Action] def __init__( - self, container: _ActionsContainer, title: str | None = ..., description: str | None = ..., **kwargs: Any + self, container: _ActionsContainer, title: str | None = None, description: str | None = None, **kwargs: Any ) -> None: ... # undocumented class _MutuallyExclusiveGroup(_ArgumentGroup): required: bool _container: _ActionsContainer - def __init__(self, container: _ActionsContainer, required: bool = ...) -> None: ... + def __init__(self, container: _ActionsContainer, required: bool = False) -> None: ... # undocumented class _StoreAction(Action): ... @@ -365,11 +366,11 @@ class _StoreConstAction(Action): self, option_strings: Sequence[str], dest: str, - const: Any | None = ..., - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... else: def __init__( @@ -377,27 +378,31 @@ class _StoreConstAction(Action): option_strings: Sequence[str], dest: str, const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... # undocumented class _StoreTrueAction(_StoreConstAction): def __init__( - self, option_strings: Sequence[str], dest: str, default: bool = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: bool = False, required: bool = False, help: str | None = None ) -> None: ... # undocumented class _StoreFalseAction(_StoreConstAction): def __init__( - self, option_strings: Sequence[str], dest: str, default: bool = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: bool = True, required: bool = False, help: str | None = None ) -> None: ... # undocumented class _AppendAction(Action): ... +# undocumented +if sys.version_info >= (3, 8): + class _ExtendAction(_AppendAction): ... + # undocumented class _AppendConstAction(Action): if sys.version_info >= (3, 11): @@ -405,11 +410,11 @@ class _AppendConstAction(Action): self, option_strings: Sequence[str], dest: str, - const: Any | None = ..., - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + const: Any | None = None, + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... else: def __init__( @@ -417,27 +422,34 @@ class _AppendConstAction(Action): option_strings: Sequence[str], dest: str, const: Any, - default: Any = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] 
| None = ..., + default: Any = None, + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... # undocumented class _CountAction(Action): def __init__( - self, option_strings: Sequence[str], dest: str, default: Any = ..., required: bool = ..., help: str | None = ... + self, option_strings: Sequence[str], dest: str, default: Any = None, required: bool = False, help: str | None = None ) -> None: ... # undocumented class _HelpAction(Action): - def __init__(self, option_strings: Sequence[str], dest: str = ..., default: str = ..., help: str | None = ...) -> None: ... + def __init__( + self, option_strings: Sequence[str], dest: str = "==SUPPRESS==", default: str = "==SUPPRESS==", help: str | None = None + ) -> None: ... # undocumented class _VersionAction(Action): version: str | None def __init__( - self, option_strings: Sequence[str], version: str | None = ..., dest: str = ..., default: str = ..., help: str = ... + self, + option_strings: Sequence[str], + version: str | None = None, + dest: str = "==SUPPRESS==", + default: str = "==SUPPRESS==", + help: str = "show program's version number and exit", ) -> None: ... # undocumented @@ -453,13 +465,58 @@ class _SubParsersAction(Action, Generic[_ArgumentParserT]): option_strings: Sequence[str], prog: str, parser_class: type[_ArgumentParserT], - dest: str = ..., - required: bool = ..., - help: str | None = ..., - metavar: str | tuple[str, ...] | None = ..., + dest: str = "==SUPPRESS==", + required: bool = False, + help: str | None = None, + metavar: str | tuple[str, ...] | None = None, ) -> None: ... - # TODO: Type keyword args properly. - def add_parser(self, name: str, **kwargs: Any) -> _ArgumentParserT: ... + + # Note: `add_parser` accepts all kwargs of `ArgumentParser.__init__`. It also + # accepts its own `help` and `aliases` kwargs. + if sys.version_info >= (3, 9): + def add_parser( + self, + name: str, + *, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = ..., + allow_abbrev: bool = ..., + exit_on_error: bool = ..., + ) -> _ArgumentParserT: ... + else: + def add_parser( + self, + name: str, + *, + help: str | None = ..., + aliases: Sequence[str] = ..., + # Kwargs from ArgumentParser constructor + prog: str | None = ..., + usage: str | None = ..., + description: str | None = ..., + epilog: str | None = ..., + parents: Sequence[_ArgumentParserT] = ..., + formatter_class: _FormatterClass = ..., + prefix_chars: str = ..., + fromfile_prefix_chars: str | None = ..., + argument_default: Any = ..., + conflict_handler: str = ..., + add_help: bool = ..., + allow_abbrev: bool = ..., + ) -> _ArgumentParserT: ... + def _get_subactions(self) -> list[Action]: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/array.pyi b/mypy/typeshed/stdlib/array.pyi index 2d27cd72e8df..827bbb97897f 100644 --- a/mypy/typeshed/stdlib/array.pyi +++ b/mypy/typeshed/stdlib/array.pyi @@ -1,10 +1,10 @@ import sys -from _typeshed import ReadableBuffer, Self, SupportsRead, SupportsWrite +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Iterable # pytype crashes if array inherits from collections.abc.MutableSequence instead of typing.MutableSequence -from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y027 -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing import Any, Generic, MutableSequence, TypeVar, overload # noqa: Y022 +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias _IntTypeCode: TypeAlias = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] _FloatTypeCode: TypeAlias = Literal["f", "d"] @@ -21,15 +21,19 @@ class array(MutableSequence[_T], Generic[_T]): @property def itemsize(self) -> int: ... @overload - def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[int] = ...) -> None: ... + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | bytearray | Iterable[int] = ...) -> None: ... @overload - def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[float] = ...) -> None: ... + def __init__( + self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | bytearray | Iterable[float] = ... + ) -> None: ... @overload - def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[str] = ...) -> None: ... + def __init__( + self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | bytearray | Iterable[str] = ... + ) -> None: ... @overload def __init__(self, __typecode: str, __initializer: Iterable[_T]) -> None: ... @overload - def __init__(self, __typecode: str, __initializer: bytes = ...) -> None: ... + def __init__(self, __typecode: str, __initializer: bytes | bytearray = ...) -> None: ... def append(self, __v: _T) -> None: ... def buffer_info(self) -> tuple[int, int]: ... def byteswap(self) -> None: ... @@ -40,19 +44,19 @@ class array(MutableSequence[_T], Generic[_T]): def fromlist(self, __list: list[_T]) -> None: ... def fromunicode(self, __ustr: str) -> None: ... if sys.version_info >= (3, 10): - def index(self, __v: _T, __start: int = ..., __stop: int = ...) -> int: ... + def index(self, __v: _T, __start: int = 0, __stop: int = sys.maxsize) -> int: ... else: def index(self, __v: _T) -> int: ... # type: ignore[override] def insert(self, __i: int, __v: _T) -> None: ... - def pop(self, __i: int = ...) -> _T: ... + def pop(self, __i: int = -1) -> _T: ... def remove(self, __v: _T) -> None: ... def tobytes(self) -> bytes: ... def tofile(self, __f: SupportsWrite[bytes]) -> None: ... def tolist(self) -> list[_T]: ... def tounicode(self) -> str: ... if sys.version_info < (3, 9): - def fromstring(self, __buffer: bytes) -> None: ... + def fromstring(self, __buffer: str | ReadableBuffer) -> None: ... def tostring(self) -> bytes: ... def __len__(self) -> int: ... @@ -68,8 +72,8 @@ class array(MutableSequence[_T], Generic[_T]): def __add__(self, __x: array[_T]) -> array[_T]: ... def __ge__(self, __other: array[_T]) -> bool: ... def __gt__(self, __other: array[_T]) -> bool: ... - def __iadd__(self: Self, __x: array[_T]) -> Self: ... # type: ignore[override] - def __imul__(self: Self, __n: int) -> Self: ... 
+ def __iadd__(self, __x: array[_T]) -> Self: ... # type: ignore[override] + def __imul__(self, __n: int) -> Self: ... def __le__(self, __other: array[_T]) -> bool: ... def __lt__(self, __other: array[_T]) -> bool: ... def __mul__(self, __n: int) -> array[_T]: ... diff --git a/mypy/typeshed/stdlib/ast.pyi b/mypy/typeshed/stdlib/ast.pyi index 6c9dbd0162b8..ea899e150f97 100644 --- a/mypy/typeshed/stdlib/ast.pyi +++ b/mypy/typeshed/stdlib/ast.pyi @@ -1,5 +1,7 @@ +import os import sys from _ast import * +from _typeshed import ReadableBuffer, Unused from collections.abc import Iterator from typing import Any, TypeVar, overload from typing_extensions import Literal @@ -7,10 +9,10 @@ from typing_extensions import Literal if sys.version_info >= (3, 8): class _ABC(type): if sys.version_info >= (3, 9): - def __init__(cls, *args: object) -> None: ... + def __init__(cls, *args: Unused) -> None: ... class Num(Constant, metaclass=_ABC): - value: complex + value: int | float | complex class Str(Constant, metaclass=_ABC): value: str @@ -87,6 +89,7 @@ class NodeVisitor: def visit_Constant(self, node: Constant) -> Any: ... if sys.version_info >= (3, 8): def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_TypeIgnore(self, node: TypeIgnore) -> Any: ... def visit_Attribute(self, node: Attribute) -> Any: ... def visit_Subscript(self, node: Subscript) -> Any: ... @@ -133,6 +136,19 @@ class NodeVisitor: def visit_keyword(self, node: keyword) -> Any: ... def visit_alias(self, node: alias) -> Any: ... def visit_withitem(self, node: withitem) -> Any: ... + if sys.version_info >= (3, 10): + def visit_Match(self, node: Match) -> Any: ... + def visit_MatchValue(self, node: MatchValue) -> Any: ... + def visit_MatchSequence(self, node: MatchSequence) -> Any: ... + def visit_MatchStar(self, node: MatchStar) -> Any: ... + def visit_MatchMapping(self, node: MatchMapping) -> Any: ... + def visit_MatchClass(self, node: MatchClass) -> Any: ... + def visit_MatchAs(self, node: MatchAs) -> Any: ... + def visit_MatchOr(self, node: MatchOr) -> Any: ... + + if sys.version_info >= (3, 11): + def visit_TryStar(self, node: TryStar) -> Any: ... + # visit methods for deprecated nodes def visit_ExtSlice(self, node: ExtSlice) -> Any: ... def visit_Index(self, node: Index) -> Any: ... @@ -157,87 +173,97 @@ _T = TypeVar("_T", bound=AST) if sys.version_info >= (3, 8): @overload def parse( - source: str | bytes, - filename: str | bytes = ..., - mode: Literal["exec"] = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Module: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Expression: ... @overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["func_type"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> FunctionType: ... 
@overload def parse( - source: str | bytes, - filename: str | bytes, + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"], *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Interactive: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["eval"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Expression: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["func_type"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> FunctionType: ... @overload def parse( - source: str | bytes, + source: str | ReadableBuffer, *, mode: Literal["single"], - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> Interactive: ... @overload def parse( - source: str | bytes, - filename: str | bytes = ..., - mode: str = ..., + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: str = "exec", *, - type_comments: bool = ..., - feature_version: None | int | tuple[int, int] = ..., + type_comments: bool = False, + feature_version: None | int | tuple[int, int] = None, ) -> AST: ... else: @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: Literal["exec"] = ...) -> Module: ... + def parse( + source: str | ReadableBuffer, + filename: str | ReadableBuffer | os.PathLike[Any] = "", + mode: Literal["exec"] = "exec", + ) -> Module: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["eval"]) -> Expression: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["eval"] + ) -> Expression: ... @overload - def parse(source: str | bytes, filename: str | bytes, mode: Literal["single"]) -> Interactive: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any], mode: Literal["single"] + ) -> Interactive: ... @overload - def parse(source: str | bytes, *, mode: Literal["eval"]) -> Expression: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["eval"]) -> Expression: ... @overload - def parse(source: str | bytes, *, mode: Literal["single"]) -> Interactive: ... + def parse(source: str | ReadableBuffer, *, mode: Literal["single"]) -> Interactive: ... @overload - def parse(source: str | bytes, filename: str | bytes = ..., mode: str = ...) -> AST: ... + def parse( + source: str | ReadableBuffer, filename: str | ReadableBuffer | os.PathLike[Any] = "", mode: str = "exec" + ) -> AST: ... if sys.version_info >= (3, 9): def unparse(ast_obj: AST) -> str: ... @@ -246,21 +272,21 @@ def copy_location(new_node: _T, old_node: AST) -> _T: ... if sys.version_info >= (3, 9): def dump( - node: AST, annotate_fields: bool = ..., include_attributes: bool = ..., *, indent: int | str | None = ... + node: AST, annotate_fields: bool = True, include_attributes: bool = False, *, indent: int | str | None = None ) -> str: ... else: - def dump(node: AST, annotate_fields: bool = ..., include_attributes: bool = ...) 
-> str: ... + def dump(node: AST, annotate_fields: bool = True, include_attributes: bool = False) -> str: ... def fix_missing_locations(node: _T) -> _T: ... -def get_docstring(node: AST, clean: bool = ...) -> str | None: ... -def increment_lineno(node: _T, n: int = ...) -> _T: ... +def get_docstring(node: AsyncFunctionDef | FunctionDef | ClassDef | Module, clean: bool = True) -> str | None: ... +def increment_lineno(node: _T, n: int = 1) -> _T: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... def literal_eval(node_or_string: str | AST) -> Any: ... if sys.version_info >= (3, 8): - def get_source_segment(source: str, node: AST, *, padded: bool = ...) -> str | None: ... + def get_source_segment(source: str, node: AST, *, padded: bool = False) -> str | None: ... def walk(node: AST) -> Iterator[AST]: ... diff --git a/mypy/typeshed/stdlib/asynchat.pyi b/mypy/typeshed/stdlib/asynchat.pyi index 4d43b02c056c..79a70d1c1ec8 100644 --- a/mypy/typeshed/stdlib/asynchat.pyi +++ b/mypy/typeshed/stdlib/asynchat.pyi @@ -2,7 +2,7 @@ import asyncore from abc import abstractmethod class simple_producer: - def __init__(self, data: bytes, buffer_size: int = ...) -> None: ... + def __init__(self, data: bytes, buffer_size: int = 512) -> None: ... def more(self) -> bytes: ... class async_chat(asyncore.dispatcher): diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 8697bfe306c4..3b8f286710b9 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -1,11 +1,11 @@ import ssl import sys -from _typeshed import FileDescriptorLike, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory from asyncio.futures import Future from asyncio.protocols import BaseProtocol from asyncio.tasks import Task -from asyncio.transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from asyncio.transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from collections.abc import Awaitable, Callable, Coroutine, Generator, Iterable, Sequence from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket @@ -34,7 +34,7 @@ class Server(AbstractServer): ssl_context: _SSLContext, backlog: int, ssl_handshake_timeout: float | None, - ssl_shutdown_timeout: float | None = ..., + ssl_shutdown_timeout: float | None = None, ) -> None: ... else: def __init__( @@ -74,42 +74,44 @@ class BaseEventLoop(AbstractEventLoop): def close(self) -> None: ... async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. - def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... def call_later( - self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> TimerHandle: ... + def call_at( + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... 
- def call_at(self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> TimerHandle: ... def time(self) -> float: ... # Future methods def create_future(self) -> Future[Any]: ... # Tasks methods if sys.version_info >= (3, 11): def create_task( - self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ..., context: Context | None = ... + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None, context: Context | None = None ) -> Task[_T]: ... elif sys.version_info >= (3, 8): - def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ...) -> Task[_T]: ... + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = None) -> Task[_T]: ... else: def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... def set_task_factory(self, factory: _TaskFactory | None) -> None: ... def get_task_factory(self) -> _TaskFactory | None: ... # Methods for interacting with threads - def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, - family: int = ..., - type: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... - async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... if sys.version_info >= (3, 11): @overload async def create_connection( @@ -118,37 +120,37 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload async def create_connection( @@ -157,35 +159,35 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... @overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... else: @overload async def create_connection( @@ -194,67 +196,67 @@ class BaseEventLoop(AbstractEventLoop): host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
@overload async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... async def start_tls( self, @@ -262,54 +264,54 @@ class BaseEventLoop(AbstractEventLoop): protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
else: @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... async def start_tls( self, @@ -317,54 +319,54 @@ class BaseEventLoop(AbstractEventLoop): protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport: ... async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = True ) -> int: ... async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True ) -> int: ... if sys.version_info >= (3, 11): async def create_datagram_endpoint( # type: ignore[override] self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... 
else: async def create_datagram_endpoint( self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + proto: int = 0, + flags: int = 0, reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. async def connect_read_pipe( self, protocol_factory: Callable[[], _ProtocolT], pipe: Any @@ -377,15 +379,15 @@ class BaseEventLoop(AbstractEventLoop): protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., - text: Literal[False, None] = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... async def subprocess_exec( @@ -393,14 +395,14 @@ class BaseEventLoop(AbstractEventLoop): protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... @@ -411,13 +413,13 @@ class BaseEventLoop(AbstractEventLoop): # BaseEventLoop, only on subclasses. We list them here for now for convenience. async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... 
+ async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi index d3ab16a3edd2..597c8302988e 100644 --- a/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_subprocess.pyi @@ -9,7 +9,6 @@ from . import events, futures, protocols, transports _File: TypeAlias = int | IO[Any] | None class BaseSubprocessTransport(transports.SubprocessTransport): - _closed: bool # undocumented _protocol: protocols.SubprocessProtocol # undocumented _loop: events.AbstractEventLoop # undocumented @@ -30,8 +29,8 @@ class BaseSubprocessTransport(transports.SubprocessTransport): stdout: _File, stderr: _File, bufsize: int, - waiter: futures.Future[Any] | None = ..., - extra: Any | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Any | None = None, **kwargs: Any, ) -> None: ... def _start( diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index 586116136c1a..f97afe873c9f 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -1,18 +1,18 @@ import ssl import sys -from _typeshed import FileDescriptorLike, Self, StrPath, WriteableBuffer +from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer from abc import ABCMeta, abstractmethod from collections.abc import Awaitable, Callable, Coroutine, Generator, Sequence from contextvars import Context from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket from typing import IO, Any, Protocol, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias from .base_events import Server from .futures import Future from .protocols import BaseProtocol from .tasks import Task -from .transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport +from .transports import BaseTransport, DatagramTransport, ReadTransport, SubprocessTransport, Transport, WriteTransport from .unix_events import AbstractChildWatcher if sys.version_info >= (3, 8): @@ -70,7 +70,7 @@ class Handle: _cancelled: bool _args: Sequence[Any] def __init__( - self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = ... + self, callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, context: Context | None = None ) -> None: ... def cancel(self) -> None: ... def _run(self) -> None: ... @@ -83,7 +83,7 @@ class TimerHandle(Handle): callback: Callable[..., object], args: Sequence[Any], loop: AbstractEventLoop, - context: Context | None = ..., + context: Context | None = None, ) -> None: ... def when(self) -> float: ... def __lt__(self, other: TimerHandle) -> bool: ... @@ -95,8 +95,8 @@ class TimerHandle(Handle): class AbstractServer: @abstractmethod def close(self) -> None: ... - async def __aenter__(self: Self) -> Self: ... - async def __aexit__(self, *exc: object) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *exc: Unused) -> None: ... @abstractmethod def get_loop(self) -> AbstractEventLoop: ... @abstractmethod @@ -132,14 +132,14 @@ class AbstractEventLoop: # Methods scheduling callbacks. All these return Handles. 
if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 @abstractmethod - def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... @abstractmethod def call_later( - self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, delay: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... @abstractmethod def call_at( - self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = ... + self, when: float, callback: Callable[..., object], *args: Any, context: Context | None = None ) -> TimerHandle: ... else: @abstractmethod @@ -161,13 +161,13 @@ class AbstractEventLoop: self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, - name: str | None = ..., - context: Context | None = ..., + name: str | None = None, + context: Context | None = None, ) -> Task[_T]: ... elif sys.version_info >= (3, 8): @abstractmethod def create_task( - self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = ... + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = None ) -> Task[_T]: ... else: @abstractmethod @@ -180,7 +180,7 @@ class AbstractEventLoop: # Methods for interacting with threads if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 @abstractmethod - def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = ...) -> Handle: ... + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any, context: Context | None = None) -> Handle: ... else: @abstractmethod def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any) -> Handle: ... @@ -194,15 +194,15 @@ class AbstractEventLoop: async def getaddrinfo( self, host: bytes | str | None, - port: str | int | None, + port: bytes | str | int | None, *, - family: int = ..., - type: int = ..., - proto: int = ..., - flags: int = ..., + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... @abstractmethod - async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0) -> tuple[str, str]: ... if sys.version_info >= (3, 11): @overload @abstractmethod @@ -212,38 +212,38 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
@overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 8): @overload @abstractmethod @@ -253,36 +253,36 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - happy_eyeballs_delay: float | None = ..., - interleave: int | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... else: @overload @abstractmethod @@ -292,94 +292,94 @@ class AbstractEventLoop: host: str = ..., port: int = ..., *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., - sock: None = ..., - local_addr: tuple[str, int] | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
@overload @abstractmethod async def create_connection( self, protocol_factory: Callable[[], _ProtocolT], - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, - ssl: _SSLContext = ..., - family: int = ..., - proto: int = ..., - flags: int = ..., + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, sock: socket, - local_addr: None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): @overload @abstractmethod async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload @abstractmethod async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @abstractmethod async def start_tls( self, - transport: BaseTransport, + transport: WriteTransport, protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - ) -> BaseTransport: ... + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, - path: StrPath | None = ..., + path: StrPath | None = None, *, - sock: socket | None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - start_serving: bool = ..., + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... 
else: @overload @@ -387,36 +387,36 @@ class AbstractEventLoop: async def create_server( self, protocol_factory: _ProtocolFactory, - host: str | Sequence[str] | None = ..., + host: str | Sequence[str] | None = None, port: int = ..., *, family: int = ..., flags: int = ..., - sock: None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @overload @abstractmethod async def create_server( self, protocol_factory: _ProtocolFactory, - host: None = ..., - port: None = ..., + host: None = None, + port: None = None, *, family: int = ..., flags: int = ..., sock: socket = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... @abstractmethod async def start_tls( @@ -425,20 +425,20 @@ class AbstractEventLoop: protocol: BaseProtocol, sslcontext: ssl.SSLContext, *, - server_side: bool = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> BaseTransport: ... + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, - path: StrPath | None = ..., + path: StrPath | None = None, *, - sock: socket | None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - start_serving: bool = ..., + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, ) -> Server: ... if sys.version_info >= (3, 11): async def connect_accepted_socket( @@ -446,66 +446,66 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... elif sys.version_info >= (3, 10): async def connect_accepted_socket( self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, - ssl: _SSLContext = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... if sys.version_info >= (3, 11): async def create_unix_connection( self, protocol_factory: Callable[[], _ProtocolT], - path: str | None = ..., + path: str | None = None, *, - ssl: _SSLContext = ..., - sock: socket | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ssl_shutdown_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... 
+ ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... else: async def create_unix_connection( self, protocol_factory: Callable[[], _ProtocolT], - path: str | None = ..., + path: str | None = None, *, - ssl: _SSLContext = ..., - sock: socket | None = ..., - server_hostname: str | None = ..., - ssl_handshake_timeout: float | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... @abstractmethod async def sock_sendfile( - self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... + self, sock: socket, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool | None = None ) -> int: ... @abstractmethod async def sendfile( - self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... + self, transport: WriteTransport, file: IO[bytes], offset: int = 0, count: int | None = None, *, fallback: bool = True ) -> int: ... @abstractmethod async def create_datagram_endpoint( self, protocol_factory: Callable[[], _ProtocolT], - local_addr: tuple[str, int] | None = ..., - remote_addr: tuple[str, int] | None = ..., + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, *, - family: int = ..., - proto: int = ..., - flags: int = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., - ) -> tuple[BaseTransport, _ProtocolT]: ... + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... # Pipes and subprocesses. @abstractmethod async def connect_read_pipe( @@ -521,14 +521,14 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... 
@@ -538,14 +538,14 @@ class AbstractEventLoop: protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, **kwargs: Any, ) -> tuple[SubprocessTransport, _ProtocolT]: ... @abstractmethod @@ -562,7 +562,7 @@ class AbstractEventLoop: @abstractmethod async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... @abstractmethod - async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... @abstractmethod async def sock_connect(self, sock: socket, address: _Address) -> None: ... @abstractmethod @@ -571,9 +571,9 @@ class AbstractEventLoop: @abstractmethod async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... @abstractmethod - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = ...) -> int: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... @abstractmethod - async def sock_sendto(self, sock: socket, data: bytes, address: _Address) -> None: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., object], *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/format_helpers.pyi b/mypy/typeshed/stdlib/asyncio/format_helpers.pyi index 4e2ef8d3f274..1c78dff3948a 100644 --- a/mypy/typeshed/stdlib/asyncio/format_helpers.pyi +++ b/mypy/typeshed/stdlib/asyncio/format_helpers.pyi @@ -16,5 +16,5 @@ def _get_function_source(func: _FuncType) -> tuple[str, int]: ... def _get_function_source(func: object) -> tuple[str, int] | None: ... def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... -def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = ...) -> str: ... -def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> traceback.StackSummary: ... +def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... diff --git a/mypy/typeshed/stdlib/asyncio/futures.pyi b/mypy/typeshed/stdlib/asyncio/futures.pyi index f917bd5dee98..79209f5ed4fb 100644 --- a/mypy/typeshed/stdlib/asyncio/futures.pyi +++ b/mypy/typeshed/stdlib/asyncio/futures.pyi @@ -1,9 +1,8 @@ import sys -from _typeshed import Self from collections.abc import Awaitable, Callable, Generator, Iterable from concurrent.futures._base import Error, Future as _ConcurrentFuture from typing import Any, TypeVar -from typing_extensions import Literal, TypeGuard +from typing_extensions import Literal, Self, TypeGuard from .events import AbstractEventLoop @@ -43,10 +42,10 @@ class Future(Awaitable[_T], Iterable[_T]): def __del__(self) -> None: ... 
def get_loop(self) -> AbstractEventLoop: ... @property - def _callbacks(self: Self) -> list[tuple[Callable[[Self], Any], Context]]: ... - def add_done_callback(self: Self, __fn: Callable[[Self], object], *, context: Context | None = ...) -> None: ... + def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... if sys.version_info >= (3, 9): - def cancel(self, msg: Any | None = ...) -> bool: ... + def cancel(self, msg: Any | None = None) -> bool: ... else: def cancel(self) -> bool: ... @@ -54,7 +53,7 @@ class Future(Awaitable[_T], Iterable[_T]): def done(self) -> bool: ... def result(self) -> _T: ... def exception(self) -> BaseException | None: ... - def remove_done_callback(self: Self, __fn: Callable[[Self], object]) -> int: ... + def remove_done_callback(self, __fn: Callable[[Self], object]) -> int: ... def set_result(self, __result: _T) -> None: ... def set_exception(self, __exception: type | BaseException) -> None: ... def __iter__(self) -> Generator[Any, None, _T]: ... @@ -64,4 +63,4 @@ class Future(Awaitable[_T], Iterable[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/locks.pyi b/mypy/typeshed/stdlib/asyncio/locks.pyi index a5cdf9aa1184..ab4e63ab59b1 100644 --- a/mypy/typeshed/stdlib/asyncio/locks.pyi +++ b/mypy/typeshed/stdlib/asyncio/locks.pyi @@ -1,11 +1,11 @@ import enum import sys -from _typeshed import Self +from _typeshed import Unused from collections import deque from collections.abc import Callable, Generator from types import TracebackType from typing import Any, TypeVar -from typing_extensions import Literal +from typing_extensions import Literal, Self from .events import AbstractEventLoop from .futures import Future @@ -31,7 +31,7 @@ else: class _ContextManager: def __init__(self, lock: Lock | Semaphore) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class _ContextManagerMixin: # Apparently this exists to *prohibit* use as a context manager. @@ -45,20 +45,20 @@ else: ) -> None: ... class Lock(_ContextManagerMixin): - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... class Event: - if sys.version_info >= (3, 11): + if sys.version_info >= (3, 10): def __init__(self) -> None: ... else: - def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... @@ -66,26 +66,26 @@ class Event: async def wait(self) -> Literal[True]: ... class Condition(_ContextManagerMixin): - if sys.version_info >= (3, 11): - def __init__(self, lock: Lock | None = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, lock: Lock | None = None) -> None: ... 
else: - def __init__(self, lock: Lock | None = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... async def wait(self) -> Literal[True]: ... async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin): _value: int _waiters: deque[Future[Any]] - if sys.version_info >= (3, 11): - def __init__(self, value: int = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, value: int = 1) -> None: ... else: - def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... def locked(self) -> bool: ... async def acquire(self) -> Literal[True]: ... @@ -103,8 +103,8 @@ if sys.version_info >= (3, 11): class Barrier(_LoopBoundMixin): def __init__(self, parties: int) -> None: ... - async def __aenter__(self: Self) -> Self: ... - async def __aexit__(self, *args: object) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... async def wait(self) -> int: ... async def abort(self) -> None: ... async def reset(self) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/mixins.pyi b/mypy/typeshed/stdlib/asyncio/mixins.pyi index 3e04f2b37518..6ebcf543e6b9 100644 --- a/mypy/typeshed/stdlib/asyncio/mixins.pyi +++ b/mypy/typeshed/stdlib/asyncio/mixins.pyi @@ -1,9 +1,9 @@ import sys import threading -from typing import NoReturn +from typing_extensions import Never _global_lock: threading.Lock class _LoopBoundMixin: if sys.version_info < (3, 11): - def __init__(self, *, loop: NoReturn = ...) -> None: ... + def __init__(self, *, loop: Never = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi index 704939450cc5..33fdf84ade4a 100644 --- a/mypy/typeshed/stdlib/asyncio/proactor_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/proactor_events.pyi @@ -20,9 +20,9 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... if sys.version_info >= (3, 8): def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ... @@ -36,10 +36,10 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., - buffer_size: int = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + buffer_size: int = 65536, ) -> None: ... 
else: def __init__( @@ -47,9 +47,9 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTran loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... @@ -57,16 +57,15 @@ class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): - _sendfile_compatible: ClassVar[constants._SendfileMode] def __init__( self, loop: events.AbstractEventLoop, sock: socket, protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, ) -> None: ... def _set_extra(self, sock: socket) -> None: ... def can_write_eof(self) -> Literal[True]: ... diff --git a/mypy/typeshed/stdlib/asyncio/queues.pyi b/mypy/typeshed/stdlib/asyncio/queues.pyi index 90ba39aebb96..f56a09524e71 100644 --- a/mypy/typeshed/stdlib/asyncio/queues.pyi +++ b/mypy/typeshed/stdlib/asyncio/queues.pyi @@ -13,10 +13,10 @@ class QueueFull(Exception): ... _T = TypeVar("_T") class Queue(Generic[_T]): - if sys.version_info >= (3, 11): - def __init__(self, maxsize: int = ...) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, maxsize: int = 0) -> None: ... else: - def __init__(self, maxsize: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ... def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... diff --git a/mypy/typeshed/stdlib/asyncio/runners.pyi b/mypy/typeshed/stdlib/asyncio/runners.pyi index 49d236bbee9e..847072b633ac 100644 --- a/mypy/typeshed/stdlib/asyncio/runners.pyi +++ b/mypy/typeshed/stdlib/asyncio/runners.pyi @@ -1,8 +1,9 @@ import sys -from _typeshed import Self +from _typeshed import Unused from collections.abc import Callable, Coroutine from contextvars import Context from typing import Any, TypeVar +from typing_extensions import Self, final from .events import AbstractEventLoop @@ -13,16 +14,22 @@ else: _T = TypeVar("_T") if sys.version_info >= (3, 11): + @final class Runner: - def __init__(self, *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, exc_type: object, exc_val: object, exc_tb: object) -> None: ... + def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... def close(self) -> None: ... def get_loop(self) -> AbstractEventLoop: ... - def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = ...) -> _T: ... 
+ def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... -if sys.version_info >= (3, 8): - def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = ...) -> _T: ... +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ... + ) -> _T: ... + +elif sys.version_info >= (3, 8): + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... else: - def run(main: Coroutine[Any, Any, _T], *, debug: bool = ...) -> _T: ... + def run(main: Coroutine[Any, Any, _T], *, debug: bool = False) -> _T: ... diff --git a/mypy/typeshed/stdlib/asyncio/selector_events.pyi b/mypy/typeshed/stdlib/asyncio/selector_events.pyi index c5468d4d72c7..430f2dd405cd 100644 --- a/mypy/typeshed/stdlib/asyncio/selector_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/selector_events.pyi @@ -5,4 +5,4 @@ from . import base_events __all__ = ("BaseSelectorEventLoop",) class BaseSelectorEventLoop(base_events.BaseEventLoop): - def __init__(self, selector: selectors.BaseSelector | None = ...) -> None: ... + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/sslproto.pyi b/mypy/typeshed/stdlib/asyncio/sslproto.pyi index 3bb4db69c123..aadc7d32b40f 100644 --- a/mypy/typeshed/stdlib/asyncio/sslproto.pyi +++ b/mypy/typeshed/stdlib/asyncio/sslproto.pyi @@ -35,7 +35,6 @@ else: if sys.version_info < (3, 11): class _SSLPipe: - max_size: ClassVar[int] _context: ssl.SSLContext @@ -48,7 +47,7 @@ if sys.version_info < (3, 11): _need_ssldata: bool _handshake_cb: Callable[[BaseException | None], None] | None _shutdown_cb: Callable[[], None] | None - def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = ...) -> None: ... + def __init__(self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None) -> None: ... @property def context(self) -> ssl.SSLContext: ... @property @@ -57,29 +56,28 @@ if sys.version_info < (3, 11): def need_ssldata(self) -> bool: ... @property def wrapped(self) -> bool: ... - def do_handshake(self, callback: Callable[[BaseException | None], object] | None = ...) -> list[bytes]: ... - def shutdown(self, callback: Callable[[], object] | None = ...) -> list[bytes]: ... + def do_handshake(self, callback: Callable[[BaseException | None], object] | None = None) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... def feed_eof(self) -> None: ... - def feed_ssldata(self, data: bytes, only_handshake: bool = ...) -> tuple[list[bytes], list[bytes]]: ... - def feed_appdata(self, data: bytes, offset: int = ...) -> tuple[list[bytes], int]: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = False) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): - _sendfile_compatible: ClassVar[constants._SendfileMode] _loop: events.AbstractEventLoop _ssl_protocol: SSLProtocol _closed: bool def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... - def get_extra_info(self, name: str, default: Any | None = ...) -> dict[str, Any]: ... + def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... @property def _protocol_paused(self) -> bool: ... - def write(self, data: bytes) -> None: ... 
+ def write(self, data: bytes | bytearray | memoryview) -> None: ... def can_write_eof(self) -> Literal[False]: ... if sys.version_info >= (3, 11): def get_write_buffer_limits(self) -> tuple[int, int]: ... def get_read_buffer_limits(self) -> tuple[int, int]: ... - def set_read_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... + def set_read_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... def get_read_buffer_size(self) -> int: ... if sys.version_info >= (3, 11): @@ -118,11 +116,11 @@ class SSLProtocol(_SSLProtocolBase): app_protocol: protocols.BaseProtocol, sslcontext: ssl.SSLContext, waiter: futures.Future[Any], - server_side: bool = ..., - server_hostname: str | None = ..., - call_connection_made: bool = ..., - ssl_handshake_timeout: int | None = ..., - ssl_shutdown_timeout: float | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ssl_shutdown_timeout: float | None = None, ) -> None: ... else: def __init__( @@ -131,17 +129,17 @@ class SSLProtocol(_SSLProtocolBase): app_protocol: protocols.BaseProtocol, sslcontext: ssl.SSLContext, waiter: futures.Future[Any], - server_side: bool = ..., - server_hostname: str | None = ..., - call_connection_made: bool = ..., - ssl_handshake_timeout: int | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, ) -> None: ... def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... - def _wakeup_waiter(self, exc: BaseException | None = ...) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... def connection_lost(self, exc: BaseException | None) -> None: ... def eof_received(self) -> None: ... - def _get_extra_info(self, name: str, default: Any | None = ...) -> Any: ... + def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... def _start_shutdown(self) -> None: ... if sys.version_info >= (3, 11): def _write_appdata(self, list_of_data: list[bytes]) -> None: ... @@ -151,7 +149,7 @@ class SSLProtocol(_SSLProtocolBase): def _start_handshake(self) -> None: ... def _check_handshake_timeout(self) -> None: ... def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... - def _fatal_error(self, exc: BaseException, message: str = ...) -> None: ... + def _fatal_error(self, exc: BaseException, message: str = "Fatal error on transport") -> None: ... def _abort(self) -> None: ... if sys.version_info >= (3, 11): def get_buffer(self, n: int) -> memoryview: ... diff --git a/mypy/typeshed/stdlib/asyncio/staggered.pyi b/mypy/typeshed/stdlib/asyncio/staggered.pyi index 610d6f70b614..3324777f4168 100644 --- a/mypy/typeshed/stdlib/asyncio/staggered.pyi +++ b/mypy/typeshed/stdlib/asyncio/staggered.pyi @@ -6,5 +6,5 @@ from . import events __all__ = ("staggered_race",) async def staggered_race( - coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = ... + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None ) -> tuple[Any, int | None, list[Exception | None]]: ... 
diff --git a/mypy/typeshed/stdlib/asyncio/streams.pyi b/mypy/typeshed/stdlib/asyncio/streams.pyi index 139d86b292c3..f30c57305d93 100644 --- a/mypy/typeshed/stdlib/asyncio/streams.pyi +++ b/mypy/typeshed/stdlib/asyncio/streams.pyi @@ -1,9 +1,9 @@ import ssl import sys -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence from typing import Any -from typing_extensions import TypeAlias +from typing_extensions import Self, SupportsIndex, TypeAlias from . import events, protocols, transports from .base_events import Server @@ -59,40 +59,40 @@ if sys.version_info < (3, 8): if sys.version_info >= (3, 10): async def open_connection( - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - limit: int = ..., + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, - host: str | Sequence[str] | None = ..., - port: int | str | None = ..., + host: str | Sequence[str] | None = None, + port: int | str | None = None, *, - limit: int = ..., + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> Server: ... else: async def open_connection( - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> tuple[StreamReader, StreamWriter]: ... async def start_server( client_connected_cb: _ClientConnectedCallback, - host: str | None = ..., - port: int | str | None = ..., + host: str | None = None, + port: int | str | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> Server: ... @@ -100,33 +100,33 @@ else: if sys.platform != "win32": if sys.version_info >= (3, 10): async def open_unix_connection( - path: StrPath | None = ..., *, limit: int = ..., **kwds: Any + path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( - client_connected_cb: _ClientConnectedCallback, path: StrPath | None = ..., *, limit: int = ..., **kwds: Any + client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any ) -> Server: ... else: async def open_unix_connection( - path: StrPath | None = ..., *, loop: events.AbstractEventLoop | None = ..., limit: int = ..., **kwds: Any + path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any ) -> tuple[StreamReader, StreamWriter]: ... async def start_unix_server( client_connected_cb: _ClientConnectedCallback, - path: StrPath | None = ..., + path: StrPath | None = None, *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, **kwds: Any, ) -> Server: ... class FlowControlMixin(protocols.Protocol): - def __init__(self, loop: events.AbstractEventLoop | None = ...) -> None: ... + def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... 
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def __init__( self, stream_reader: StreamReader, - client_connected_cb: _ClientConnectedCallback | None = ..., - loop: events.AbstractEventLoop | None = ..., + client_connected_cb: _ClientConnectedCallback | None = None, + loop: events.AbstractEventLoop | None = None, ) -> None: ... class StreamWriter: @@ -139,31 +139,32 @@ class StreamWriter: ) -> None: ... @property def transport(self) -> transports.WriteTransport: ... - def write(self, data: bytes) -> None: ... - def writelines(self, data: Iterable[bytes]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def close(self) -> None: ... def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... - def get_extra_info(self, name: str, default: Any = ...) -> Any: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... async def drain(self) -> None: ... if sys.version_info >= (3, 11): async def start_tls( - self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ... + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None ) -> None: ... class StreamReader(AsyncIterator[bytes]): - def __init__(self, limit: int = ..., loop: events.AbstractEventLoop | None = ...) -> None: ... + def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ... def exception(self) -> Exception: ... def set_exception(self, exc: Exception) -> None: ... def set_transport(self, transport: transports.BaseTransport) -> None: ... def feed_eof(self) -> None: ... def at_eof(self) -> bool: ... - def feed_data(self, data: bytes) -> None: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... async def readline(self) -> bytes: ... - async def readuntil(self, separator: bytes = ...) -> bytes: ... - async def read(self, n: int = ...) -> bytes: ... + # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted + async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ... + async def read(self, n: int = -1) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... - def __aiter__(self: Self) -> Self: ... + def __aiter__(self) -> Self: ... async def __anext__(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/asyncio/subprocess.pyi b/mypy/typeshed/stdlib/asyncio/subprocess.pyi index 32fcf1a65491..10a414f24537 100644 --- a/mypy/typeshed/stdlib/asyncio/subprocess.pyi +++ b/mypy/typeshed/stdlib/asyncio/subprocess.pyi @@ -2,7 +2,7 @@ import subprocess import sys from _typeshed import StrOrBytesPath from asyncio import events, protocols, streams, transports -from collections.abc import Callable +from collections.abc import Callable, Collection from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -38,22 +38,22 @@ class Process: def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - async def communicate(self, input: bytes | None = ...) -> tuple[bytes, bytes]: ... + async def communicate(self, input: bytes | bytearray | memoryview | None = None) -> tuple[bytes, bytes]: ... 
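The `communicate()` change just above widens `input` from `bytes` to any bytes-like object (`bytes | bytearray | memoryview`), matching the earlier `StreamWriter.write()` change. A small sketch, assuming `sys.executable` can be launched as a child process, that feeds a `memoryview` to a child which simply echoes its stdin:

```python
import asyncio
import sys

async def main() -> None:
    proc = await asyncio.create_subprocess_exec(
        sys.executable, "-c", "import sys; sys.stdout.write(sys.stdin.read())",
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
    )
    # Under the widened annotation, bytes, bytearray and memoryview all type-check.
    payload = memoryview(b"hello from a buffer")
    out, _ = await proc.communicate(input=payload)
    print(out.decode())  # hello from a buffer

asyncio.run(main())
```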
-if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): async def create_subprocess_shell( cmd: str | bytes, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., # These parameters are taken by subprocess.Popen, which this ultimately delegates to executable: StrOrBytesPath | None = ..., @@ -65,21 +65,27 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... async def create_subprocess_exec( program: _ExecArg, *args: _ExecArg, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, # These parameters are taken by subprocess.Popen, which this ultimately delegates to text: bool | None = ..., executable: StrOrBytesPath | None = ..., @@ -91,24 +97,94 @@ if sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + process_group: int | None = ..., + pipesize: int = ..., ) -> Process: ... 
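The 3.11 branch above also tightens the `subprocess.Popen` passthrough parameters (`pass_fds: Collection[int]` instead of `Any`, plus `group`, `extra_groups`, `user`, `umask`, `process_group` and `pipesize`). A hedged, POSIX-only sketch; the pipe exists purely to have a file descriptor worth passing:

```python
import asyncio
import os
import sys

async def main() -> None:
    r, w = os.pipe()
    proc = await asyncio.create_subprocess_exec(
        sys.executable, "-c", "print('child ran')",
        stdout=asyncio.subprocess.PIPE,
        pass_fds=(r,),  # a tuple[int, ...] satisfies Collection[int]
    )
    out, _ = await proc.communicate()
    print(out.decode().strip())  # child ran
    os.close(r)
    os.close(w)

if sys.platform != "win32":  # pass_fds is POSIX-only
    asyncio.run(main())
```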
-else: +elif sys.version_info >= (3, 10): + async def create_subprocess_shell( + cmd: str | bytes, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + *, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = ..., + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... + async def create_subprocess_exec( + program: _ExecArg, + *args: _ExecArg, + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + limit: int = 65536, + # These parameters are forced to these values by BaseEventLoop.subprocess_shell + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + # These parameters are taken by subprocess.Popen, which this ultimately delegates to + text: bool | None = ..., + executable: StrOrBytesPath | None = ..., + preexec_fn: Callable[[], Any] | None = ..., + close_fds: bool = ..., + cwd: StrOrBytesPath | None = ..., + env: subprocess._ENV | None = ..., + startupinfo: Any | None = ..., + creationflags: int = ..., + restore_signals: bool = ..., + start_new_session: bool = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., + pipesize: int = ..., + ) -> Process: ... + +else: # >= 3.9 async def create_subprocess_shell( cmd: str | bytes, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, *, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, text: Literal[False, None] = ..., # These parameters are taken by subprocess.Popen, which this ultimately delegates to executable: StrOrBytesPath | None = ..., @@ -120,22 +196,26 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... 
async def create_subprocess_exec( program: _ExecArg, *args: _ExecArg, - stdin: int | IO[Any] | None = ..., - stdout: int | IO[Any] | None = ..., - stderr: int | IO[Any] | None = ..., - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., + stdin: int | IO[Any] | None = None, + stdout: int | IO[Any] | None = None, + stderr: int | IO[Any] | None = None, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, # These parameters are forced to these values by BaseEventLoop.subprocess_shell - universal_newlines: Literal[False] = ..., - shell: Literal[True] = ..., - bufsize: Literal[0] = ..., - encoding: None = ..., - errors: None = ..., + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, # These parameters are taken by subprocess.Popen, which this ultimately delegates to text: bool | None = ..., executable: StrOrBytesPath | None = ..., @@ -147,5 +227,9 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., + group: None | str | int = ..., + extra_groups: None | Collection[str | int] = ..., + user: None | str | int = ..., + umask: int = ..., ) -> Process: ... diff --git a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi index 9b2f15506c50..8daa96f1ede0 100644 --- a/mypy/typeshed/stdlib/asyncio/taskgroups.pyi +++ b/mypy/typeshed/stdlib/asyncio/taskgroups.pyi @@ -1,10 +1,10 @@ # This only exists in 3.11+. See VERSIONS. -from _typeshed import Self from collections.abc import Coroutine, Generator from contextvars import Context from types import TracebackType from typing import Any, TypeVar +from typing_extensions import Self from .tasks import Task @@ -13,9 +13,8 @@ __all__ = ["TaskGroup"] _T = TypeVar("_T") class TaskGroup: - def __init__(self) -> None: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def create_task( - self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None ) -> Task[_T]: ... diff --git a/mypy/typeshed/stdlib/asyncio/tasks.pyi b/mypy/typeshed/stdlib/asyncio/tasks.pyi index 76755f1109c3..0a44255a3ac8 100644 --- a/mypy/typeshed/stdlib/asyncio/tasks.pyi +++ b/mypy/typeshed/stdlib/asyncio/tasks.pyi @@ -36,6 +36,7 @@ __all__ = ( ) _T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") @@ -50,17 +51,17 @@ FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION ALL_COMPLETED = concurrent.futures.ALL_COMPLETED if sys.version_info >= (3, 10): - def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = ...) -> Iterator[Future[_T]]: ... + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... else: def as_completed( - fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ... + fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None ) -> Iterator[Future[_T]]: ... @overload -def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = ...) 
-> _FT: ... # type: ignore[misc] +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[misc] @overload -def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = ...) -> Task[_T]: ... +def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... # `gather()` actually returns a list with length equal to the number # of tasks passed; however, Tuple is used similar to the annotation for @@ -71,10 +72,10 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No # but having overlapping overloads is the only way to get acceptable type inference in all edge cases. if sys.version_info >= (3, 10): @overload - def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = ...) -> Future[tuple[_T1]]: ... # type: ignore[misc] + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[misc] @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = ... + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = False ) -> Future[tuple[_T1, _T2]]: ... @overload def gather( # type: ignore[misc] @@ -82,7 +83,7 @@ if sys.version_info >= (3, 10): __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3]]: ... @overload def gather( # type: ignore[misc] @@ -91,7 +92,7 @@ if sys.version_info >= (3, 10): __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( # type: ignore[misc] @@ -101,7 +102,7 @@ if sys.version_info >= (3, 10): __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - return_exceptions: Literal[False] = ..., + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[misc] @@ -139,20 +140,20 @@ if sys.version_info >= (3, 10): tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... @overload - def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = ...) -> Future[list[Any]]: ... # type: ignore[misc] + def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ... # type: ignore[misc] else: @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ... + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False ) -> Future[tuple[_T1]]: ... @overload def gather( # type: ignore[misc] __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2]]: ... 
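As the comment in this hunk notes, the overlapping per-arity overloads exist so that type checkers can give `gather()` a precise per-position result type even though the runtime value is a list. A short illustration with made-up coroutine names (the remaining overloads of the pre-3.10 branch continue below):

```python
import asyncio

async def get_port() -> int:
    return 8080

async def get_host() -> str:
    return "localhost"

async def main() -> None:
    # Checkers infer tuple[int, str] from the two-argument overload,
    # even though gather() really returns a list at runtime.
    port, host = await asyncio.gather(get_port(), get_host())
    print(f"{host}:{port}")  # localhost:8080

asyncio.run(main())
```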
@overload def gather( # type: ignore[misc] @@ -160,8 +161,8 @@ else: __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3]]: ... @overload def gather( # type: ignore[misc] @@ -170,8 +171,8 @@ else: __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( # type: ignore[misc] @@ -181,19 +182,19 @@ else: __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - loop: AbstractEventLoop | None = ..., - return_exceptions: Literal[False] = ..., + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather( # type: ignore[misc] - __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool ) -> Future[tuple[_T1 | BaseException]]: ... @overload def gather( # type: ignore[misc] __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... @overload @@ -202,7 +203,7 @@ else: __coro_or_future2: _FutureLike[_T2], __coro_or_future3: _FutureLike[_T3], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... @overload @@ -212,7 +213,7 @@ else: __coro_or_future3: _FutureLike[_T3], __coro_or_future4: _FutureLike[_T4], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... @overload @@ -223,14 +224,14 @@ else: __coro_or_future4: _FutureLike[_T4], __coro_or_future5: _FutureLike[_T5], *, - loop: AbstractEventLoop | None = ..., + loop: AbstractEventLoop | None = None, return_exceptions: bool, ) -> Future[ tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... @overload def gather( # type: ignore[misc] - *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = ..., return_exceptions: bool = ... + *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False ) -> Future[list[Any]]: ... def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... @@ -242,74 +243,86 @@ if sys.version_info >= (3, 10): @overload async def sleep(delay: float, result: _T) -> _T: ... @overload - async def wait(fs: Iterable[_FT], *, timeout: float | None = ..., return_when: str = ...) -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... 
# type: ignore[misc] @overload async def wait( - fs: Iterable[Awaitable[_T]], *, timeout: float | None = ..., return_when: str = ... + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... else: - def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... @overload - async def sleep(delay: float, *, loop: AbstractEventLoop | None = ...) -> None: ... + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... @overload - async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = ...) -> _T: ... + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... @overload async def wait( # type: ignore[misc] - fs: Iterable[_FT], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... + fs: Iterable[_FT], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", ) -> tuple[set[_FT], set[_FT]]: ... @overload async def wait( - fs: Iterable[Awaitable[_T]], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... + fs: Iterable[Awaitable[_T]], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... - async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = ...) -> _T: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... -class Task(Future[_T], Generic[_T]): +# mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. +# While this is true in general, here it's sort-of okay to have a covariant subclass, +# since the only reason why `asyncio.Future` is invariant is the `set_result()` method, +# and `asyncio.Task.set_result()` always raises. +class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] if sys.version_info >= (3, 8): def __init__( self, - coro: Generator[_TaskYieldType, None, _T] | Awaitable[_T], + coro: Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co], *, loop: AbstractEventLoop = ..., name: str | None = ..., ) -> None: ... else: def __init__( - self, coro: Generator[_TaskYieldType, None, _T] | Awaitable[_T], *, loop: AbstractEventLoop = ... + self, coro: Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co], *, loop: AbstractEventLoop = ... ) -> None: ... if sys.version_info >= (3, 8): - def get_coro(self) -> Generator[_TaskYieldType, None, _T] | Awaitable[_T]: ... + def get_coro(self) -> Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co]: ... def get_name(self) -> str: ... def set_name(self, __value: object) -> None: ... - def get_stack(self, *, limit: int | None = ...) -> list[FrameType]: ... - def print_stack(self, *, limit: int | None = ..., file: TextIO | None = ...) -> None: ... + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... if sys.version_info >= (3, 11): def cancelling(self) -> int: ... def uncancel(self) -> int: ... 
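The comment above is the interesting part of this hunk: `Task` is now generic over a covariant `_T_co`, with the `type: ignore` acknowledging that its base `Future` is invariant. A sketch of what this permits, using made-up `Animal`/`Dog` classes; code that only awaits a task cannot call `set_result()`, so accepting a `Task[Dog]` where a `Task[Animal]` is expected is safe:

```python
import asyncio

class Animal: ...
class Dog(Animal): ...

async def adopt() -> Dog:
    return Dog()

async def feed(task: "asyncio.Task[Animal]") -> Animal:
    # Read-only use of the task; nothing here can write a wrong value into it.
    return await task

async def main() -> None:
    dog_task = asyncio.create_task(adopt())  # inferred as Task[Dog]
    animal = await feed(dog_task)            # accepted once _T_co is covariant
    print(type(animal).__name__)             # Dog

asyncio.run(main())
```

Before this change, passing `dog_task` to `feed()` was an invariance error.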
if sys.version_info < (3, 9): @classmethod - def current_task(cls, loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... + def current_task(cls, loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... @classmethod - def all_tasks(cls, loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... + def all_tasks(cls, loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def all_tasks(loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... if sys.version_info >= (3, 11): def create_task( - coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ..., context: Context | None = ... + coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None, context: Context | None = None ) -> Task[_T]: ... elif sys.version_info >= (3, 8): - def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = None) -> Task[_T]: ... else: def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T]) -> Task[_T]: ... -def current_task(loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... +def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... def _register_task(task: Task[Any]) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/timeouts.pyi b/mypy/typeshed/stdlib/asyncio/timeouts.pyi index be516b5851d1..2d31b777b77d 100644 --- a/mypy/typeshed/stdlib/asyncio/timeouts.pyi +++ b/mypy/typeshed/stdlib/asyncio/timeouts.pyi @@ -1,6 +1,5 @@ -from _typeshed import Self from types import TracebackType -from typing_extensions import final +from typing_extensions import Self, final __all__ = ("Timeout", "timeout", "timeout_at") @@ -10,7 +9,7 @@ class Timeout: def when(self) -> float | None: ... def reschedule(self, when: float | None) -> None: ... def expired(self) -> bool: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/transports.pyi b/mypy/typeshed/stdlib/asyncio/transports.pyi index 52937c9bcbdf..531f77672438 100644 --- a/mypy/typeshed/stdlib/asyncio/transports.pyi +++ b/mypy/typeshed/stdlib/asyncio/transports.pyi @@ -1,14 +1,14 @@ from asyncio.events import AbstractEventLoop from asyncio.protocols import BaseProtocol -from collections.abc import Mapping +from collections.abc import Iterable, Mapping from socket import _Address from typing import Any __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") class BaseTransport: - def __init__(self, extra: Mapping[Any, Any] | None = ...) -> None: ... - def get_extra_info(self, name: Any, default: Any = ...) -> Any: ... + def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... def is_closing(self) -> bool: ... def close(self) -> None: ... def set_protocol(self, protocol: BaseProtocol) -> None: ... 
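The `timeouts.pyi` hunk above switches `Timeout.__aenter__` to the new `typing_extensions.Self`, so the object bound by `async with ... as t` is typed as `Timeout` and its `reschedule()` / `expired()` methods are visible to checkers. A 3.11+ sketch (the delays are arbitrary):

```python
import asyncio

async def main() -> None:
    try:
        async with asyncio.timeout(0.05) as t:
            # Push the deadline back a little; `t` is a Timeout thanks to Self.
            t.reschedule(asyncio.get_running_loop().time() + 0.1)
            await asyncio.sleep(1)
    except TimeoutError:
        print("timed out; expired() ->", t.expired())

asyncio.run(main())
```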
@@ -20,11 +20,11 @@ class ReadTransport(BaseTransport): def resume_reading(self) -> None: ... class WriteTransport(BaseTransport): - def set_write_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... def get_write_buffer_size(self) -> int: ... def get_write_buffer_limits(self) -> tuple[int, int]: ... - def write(self, data: Any) -> None: ... - def writelines(self, list_of_data: list[Any]) -> None: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... def write_eof(self) -> None: ... def can_write_eof(self) -> bool: ... def abort(self) -> None: ... @@ -32,16 +32,16 @@ class WriteTransport(BaseTransport): class Transport(ReadTransport, WriteTransport): ... class DatagramTransport(BaseTransport): - def sendto(self, data: Any, addr: _Address | None = ...) -> None: ... + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... def abort(self) -> None: ... class SubprocessTransport(BaseTransport): def get_pid(self) -> int: ... def get_returncode(self) -> int | None: ... def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... - def send_signal(self, signal: int) -> int: ... + def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... class _FlowControlMixin(Transport): - def __init__(self, extra: Mapping[Any, Any] | None = ..., loop: AbstractEventLoop | None = ...) -> None: ... + def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/trsock.pyi b/mypy/typeshed/stdlib/asyncio/trsock.pyi index b8972e43d255..742216a84ccd 100644 --- a/mypy/typeshed/stdlib/asyncio/trsock.pyi +++ b/mypy/typeshed/stdlib/asyncio/trsock.pyi @@ -1,5 +1,6 @@ import socket import sys +from _typeshed import ReadableBuffer from builtins import type as Type # alias to avoid name clashes with property named "type" from collections.abc import Iterable from types import TracebackType @@ -7,7 +8,7 @@ from typing import Any, BinaryIO, NoReturn, overload from typing_extensions import TypeAlias # These are based in socket, maybe move them out into _typeshed.pyi or such -_Address: TypeAlias = tuple[Any, ...] | str +_Address: TypeAlias = socket._Address _RetAddress: TypeAlias = Any _WriteBuffer: TypeAlias = bytearray | memoryview _CMSG: TypeAlias = tuple[int, int, bytes] @@ -30,7 +31,7 @@ class TransportSocket: @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... @overload - def setsockopt(self, level: int, optname: int, value: int | bytes) -> None: ... + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... @overload def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... def getpeername(self) -> _RetAddress: ... @@ -42,9 +43,9 @@ class TransportSocket: if sys.version_info < (3, 11): def _na(self, what: str) -> None: ... def accept(self) -> tuple[socket.socket, _RetAddress]: ... - def connect(self, address: _Address | bytes) -> None: ... - def connect_ex(self, address: _Address | bytes) -> int: ... - def bind(self, address: _Address | bytes) -> None: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... + def bind(self, address: _Address) -> None: ... 
if sys.platform == "win32": def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... else: @@ -57,22 +58,26 @@ class TransportSocket: def detach(self) -> int: ... if sys.platform == "linux": def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> int: ... else: def sendmsg_afalg( - self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... + self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> NoReturn: ... def sendmsg( - self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: _Address = ... + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSG] = ..., + __flags: int = ..., + __address: _Address = ..., ) -> int: ... @overload - def sendto(self, data: bytes, address: _Address) -> int: ... + def sendto(self, data: ReadableBuffer, address: _Address) -> int: ... @overload - def sendto(self, data: bytes, flags: int, address: _Address) -> int: ... - def send(self, data: bytes, flags: int = ...) -> int: ... - def sendall(self, data: bytes, flags: int = ...) -> None: ... + def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ... + def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... def set_inheritable(self, inheritable: bool) -> None: ... if sys.platform == "win32": def share(self, process_id: int) -> bytes: ... diff --git a/mypy/typeshed/stdlib/asyncio/unix_events.pyi b/mypy/typeshed/stdlib/asyncio/unix_events.pyi index f63011a373be..e28d64b5287b 100644 --- a/mypy/typeshed/stdlib/asyncio/unix_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/unix_events.pyi @@ -1,10 +1,9 @@ import sys import types -from _typeshed import Self from abc import ABCMeta, abstractmethod from collections.abc import Callable from typing import Any -from typing_extensions import Literal +from typing_extensions import Literal, Self from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy from .selector_events import BaseSelectorEventLoop @@ -22,7 +21,7 @@ class AbstractChildWatcher: @abstractmethod def close(self) -> None: ... @abstractmethod - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... @abstractmethod def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... if sys.version_info >= (3, 8): @@ -64,13 +63,13 @@ if sys.platform != "win32": def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... class SafeChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... def remove_child_handler(self, pid: int) -> bool: ... class FastChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... 
def remove_child_handler(self, pid: int) -> bool: ... @@ -85,7 +84,6 @@ if sys.platform != "win32": DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy if sys.version_info >= (3, 8): - from typing import Protocol class _Warn(Protocol): @@ -96,7 +94,7 @@ if sys.platform != "win32": class MultiLoopChildWatcher(AbstractChildWatcher): def is_active(self) -> bool: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -107,7 +105,7 @@ if sys.platform != "win32": class ThreadedChildWatcher(AbstractChildWatcher): def is_active(self) -> Literal[True]: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -118,8 +116,7 @@ if sys.platform != "win32": if sys.version_info >= (3, 9): class PidfdChildWatcher(AbstractChildWatcher): - def __init__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/windows_events.pyi b/mypy/typeshed/stdlib/asyncio/windows_events.pyi index ffb487fff03a..2942a25c0ac4 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_events.pyi @@ -1,6 +1,6 @@ import socket import sys -from _typeshed import WriteableBuffer +from _typeshed import Incomplete, WriteableBuffer from collections.abc import Callable from typing import IO, Any, ClassVar, NoReturn from typing_extensions import Literal @@ -33,7 +33,7 @@ if sys.platform == "win32": class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... class ProactorEventLoop(proactor_events.BaseProactorEventLoop): - def __init__(self, proactor: IocpProactor | None = ...) -> None: ... + def __init__(self, proactor: IocpProactor | None = None) -> None: ... async def create_pipe_connection( self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str ) -> tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... @@ -42,19 +42,23 @@ if sys.platform == "win32": ) -> list[PipeServer]: ... class IocpProactor: - def __init__(self, concurrency: int = ...) -> None: ... + def __init__(self, concurrency: int = 0xFFFFFFFF) -> None: ... def __del__(self) -> None: ... def set_loop(self, loop: events.AbstractEventLoop) -> None: ... - def select(self, timeout: int | None = ...) -> list[futures.Future[Any]]: ... - def recv(self, conn: socket.socket, nbytes: int, flags: int = ...) -> futures.Future[bytes]: ... - def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... - def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... + def select(self, timeout: int | None = None) -> list[futures.Future[Any]]: ... + def recv(self, conn: socket.socket, nbytes: int, flags: int = 0) -> futures.Future[bytes]: ... + def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... + def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = 0) -> futures.Future[Any]: ... 
def accept(self, listener: socket.socket) -> futures.Future[Any]: ... - def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... + def connect( + self, + conn: socket.socket, + address: tuple[Incomplete, Incomplete] | tuple[Incomplete, Incomplete, Incomplete, Incomplete], + ) -> futures.Future[Any]: ... def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... - async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... - def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... + async def connect_pipe(self, address: str) -> windows_utils.PipeHandle: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = None) -> bool: ... def close(self) -> None: ... SelectorEventLoop = _WindowsSelectorEventLoop diff --git a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi index 6e170dcb073a..f3a82e2b8462 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi @@ -1,10 +1,9 @@ import subprocess import sys -from _typeshed import Self from collections.abc import Callable from types import TracebackType from typing import Any, AnyStr, Protocol -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.platform == "win32": __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") @@ -16,7 +15,7 @@ if sys.platform == "win32": BUFSIZE: Literal[8192] PIPE = subprocess.PIPE STDOUT = subprocess.STDOUT - def pipe(*, duplex: bool = ..., overlapped: tuple[bool, bool] = ..., bufsize: int = ...) -> tuple[int, int]: ... + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = ..., bufsize: int = 8192) -> tuple[int, int]: ... class PipeHandle: def __init__(self, handle: int) -> None: ... @@ -25,7 +24,7 @@ if sys.platform == "win32": else: def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @property def handle(self) -> int: ... @@ -41,7 +40,7 @@ if sys.platform == "win32": # subprocess.Popen takes other positional-or-keyword arguments before # stdin. def __new__( - cls: type[Self], + cls, args: subprocess._CMD, stdin: subprocess._FILE | None = ..., stdout: subprocess._FILE | None = ..., @@ -51,8 +50,8 @@ if sys.platform == "win32": def __init__( self, args: subprocess._CMD, - stdin: subprocess._FILE | None = ..., - stdout: subprocess._FILE | None = ..., - stderr: subprocess._FILE | None = ..., + stdin: subprocess._FILE | None = None, + stdout: subprocess._FILE | None = None, + stderr: subprocess._FILE | None = None, **kwds: Any, ) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncore.pyi b/mypy/typeshed/stdlib/asyncore.pyi index 0025ec3f9b4e..47c8e2207022 100644 --- a/mypy/typeshed/stdlib/asyncore.pyi +++ b/mypy/typeshed/stdlib/asyncore.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, ReadableBuffer from socket import socket from typing import Any, overload from typing_extensions import TypeAlias @@ -15,17 +15,16 @@ class ExitNow(Exception): ... def read(obj: Any) -> None: ... def write(obj: Any) -> None: ... def readwrite(obj: Any, flags: int) -> None: ... 
-def poll(timeout: float = ..., map: _MapType | None = ...) -> None: ... -def poll2(timeout: float = ..., map: _MapType | None = ...) -> None: ... +def poll(timeout: float = 0.0, map: _MapType | None = None) -> None: ... +def poll2(timeout: float = 0.0, map: _MapType | None = None) -> None: ... poll3 = poll2 -def loop(timeout: float = ..., use_poll: bool = ..., map: _MapType | None = ..., count: int | None = ...) -> None: ... +def loop(timeout: float = 30.0, use_poll: bool = False, map: _MapType | None = None, count: int | None = None) -> None: ... # Not really subclass of socket.socket; it's only delegation. # It is not covariant to it. class dispatcher: - debug: bool connected: bool accepting: bool @@ -33,11 +32,11 @@ class dispatcher: closing: bool ignore_log_types: frozenset[str] socket: _Socket | None - def __init__(self, sock: _Socket | None = ..., map: _MapType | None = ...) -> None: ... - def add_channel(self, map: _MapType | None = ...) -> None: ... - def del_channel(self, map: _MapType | None = ...) -> None: ... + def __init__(self, sock: _Socket | None = None, map: _MapType | None = None) -> None: ... + def add_channel(self, map: _MapType | None = None) -> None: ... + def del_channel(self, map: _MapType | None = None) -> None: ... def create_socket(self, family: int = ..., type: int = ...) -> None: ... - def set_socket(self, sock: _Socket, map: _MapType | None = ...) -> None: ... + def set_socket(self, sock: _Socket, map: _MapType | None = None) -> None: ... def set_reuse_addr(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -45,11 +44,11 @@ class dispatcher: def bind(self, addr: tuple[Any, ...] | str) -> None: ... def connect(self, address: tuple[Any, ...] | str) -> None: ... def accept(self) -> tuple[_Socket, Any] | None: ... - def send(self, data: bytes) -> int: ... + def send(self, data: ReadableBuffer) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... def log(self, message: Any) -> None: ... - def log_info(self, message: Any, type: str = ...) -> None: ... + def log_info(self, message: Any, type: str = "info") -> None: ... def handle_read_event(self) -> None: ... def handle_connect_event(self) -> None: ... def handle_write_event(self) -> None: ... @@ -68,7 +67,7 @@ class dispatcher_with_send(dispatcher): # def send(self, data: bytes) -> int | None: ... def compact_traceback() -> tuple[tuple[str, str, str], type, type, str]: ... -def close_all(map: _MapType | None = ..., ignore_all: bool = ...) -> None: ... +def close_all(map: _MapType | None = None, ignore_all: bool = False) -> None: ... if sys.platform != "win32": class file_wrapper: @@ -77,7 +76,7 @@ if sys.platform != "win32": def recv(self, bufsize: int, flags: int = ...) -> bytes: ... def send(self, data: bytes, flags: int = ...) -> int: ... @overload - def getsockopt(self, level: int, optname: int, buflen: None = ...) -> int: ... + def getsockopt(self, level: int, optname: int, buflen: None = None) -> int: ... @overload def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... def read(self, bufsize: int, flags: int = ...) -> bytes: ... @@ -86,5 +85,5 @@ if sys.platform != "win32": def fileno(self) -> int: ... class file_dispatcher(dispatcher): - def __init__(self, fd: FileDescriptorLike, map: _MapType | None = ...) -> None: ... + def __init__(self, fd: FileDescriptorLike, map: _MapType | None = None) -> None: ... def set_file(self, fd: int) -> None: ... 
diff --git a/mypy/typeshed/stdlib/audioop.pyi b/mypy/typeshed/stdlib/audioop.pyi index 62b54ced9127..b5934516e40f 100644 --- a/mypy/typeshed/stdlib/audioop.pyi +++ b/mypy/typeshed/stdlib/audioop.pyi @@ -32,8 +32,8 @@ def ratecv( __inrate: int, __outrate: int, __state: _RatecvState | None, - __weightA: int = ..., - __weightB: int = ..., + __weightA: int = 1, + __weightB: int = 0, ) -> tuple[bytes, _RatecvState]: ... def reverse(__fragment: bytes, __width: int) -> bytes: ... def rms(__fragment: bytes, __width: int) -> int: ... diff --git a/mypy/typeshed/stdlib/base64.pyi b/mypy/typeshed/stdlib/base64.pyi index c2ec85cac40a..24830cbfba04 100644 --- a/mypy/typeshed/stdlib/base64.pyi +++ b/mypy/typeshed/stdlib/base64.pyi @@ -26,24 +26,28 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["b32hexencode", "b32hexdecode"] -def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = ...) -> bytes: ... -def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = ..., validate: bool = ...) -> bytes: ... +def b64encode(s: ReadableBuffer, altchars: ReadableBuffer | None = None) -> bytes: ... +def b64decode(s: str | ReadableBuffer, altchars: ReadableBuffer | None = None, validate: bool = False) -> bytes: ... def standard_b64encode(s: ReadableBuffer) -> bytes: ... def standard_b64decode(s: str | ReadableBuffer) -> bytes: ... def urlsafe_b64encode(s: ReadableBuffer) -> bytes: ... def urlsafe_b64decode(s: str | ReadableBuffer) -> bytes: ... def b32encode(s: ReadableBuffer) -> bytes: ... -def b32decode(s: str | ReadableBuffer, casefold: bool = ..., map01: bytes | None = ...) -> bytes: ... +def b32decode(s: str | ReadableBuffer, casefold: bool = False, map01: bytes | None = None) -> bytes: ... def b16encode(s: ReadableBuffer) -> bytes: ... -def b16decode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... +def b16decode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... if sys.version_info >= (3, 10): def b32hexencode(s: ReadableBuffer) -> bytes: ... - def b32hexdecode(s: str | ReadableBuffer, casefold: bool = ...) -> bytes: ... + def b32hexdecode(s: str | ReadableBuffer, casefold: bool = False) -> bytes: ... -def a85encode(b: ReadableBuffer, *, foldspaces: bool = ..., wrapcol: int = ..., pad: bool = ..., adobe: bool = ...) -> bytes: ... -def a85decode(b: str | ReadableBuffer, *, foldspaces: bool = ..., adobe: bool = ..., ignorechars: str | bytes = ...) -> bytes: ... -def b85encode(b: ReadableBuffer, pad: bool = ...) -> bytes: ... +def a85encode( + b: ReadableBuffer, *, foldspaces: bool = False, wrapcol: int = 0, pad: bool = False, adobe: bool = False +) -> bytes: ... +def a85decode( + b: str | ReadableBuffer, *, foldspaces: bool = False, adobe: bool = False, ignorechars: bytearray | bytes = b" \t\n\r\x0b" +) -> bytes: ... +def b85encode(b: ReadableBuffer, pad: bool = False) -> bytes: ... def b85decode(b: str | ReadableBuffer) -> bytes: ... def decode(input: IO[bytes], output: IO[bytes]) -> None: ... def encode(input: IO[bytes], output: IO[bytes]) -> None: ... diff --git a/mypy/typeshed/stdlib/bdb.pyi b/mypy/typeshed/stdlib/bdb.pyi index 58808632b31d..2a1fdddff7e9 100644 --- a/mypy/typeshed/stdlib/bdb.pyi +++ b/mypy/typeshed/stdlib/bdb.pyi @@ -24,7 +24,7 @@ class Bdb: stopframe: FrameType | None returnframe: FrameType | None stoplineno: int - def __init__(self, skip: Iterable[str] | None = ...) -> None: ... + def __init__(self, skip: Iterable[str] | None = None) -> None: ... def canonic(self, filename: str) -> str: ... def reset(self) -> None: ... 
def trace_dispatch(self, frame: FrameType, event: str, arg: Any) -> TraceFunction: ... @@ -41,15 +41,15 @@ class Bdb: def user_line(self, frame: FrameType) -> None: ... def user_return(self, frame: FrameType, return_value: Any) -> None: ... def user_exception(self, frame: FrameType, exc_info: ExcInfo) -> None: ... - def set_until(self, frame: FrameType, lineno: int | None = ...) -> None: ... + def set_until(self, frame: FrameType, lineno: int | None = None) -> None: ... def set_step(self) -> None: ... def set_next(self, frame: FrameType) -> None: ... def set_return(self, frame: FrameType) -> None: ... - def set_trace(self, frame: FrameType | None = ...) -> None: ... + def set_trace(self, frame: FrameType | None = None) -> None: ... def set_continue(self) -> None: ... def set_quit(self) -> None: ... def set_break( - self, filename: str, lineno: int, temporary: bool = ..., cond: str | None = ..., funcname: str | None = ... + self, filename: str, lineno: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None ) -> None: ... def clear_break(self, filename: str, lineno: int) -> None: ... def clear_bpbynumber(self, arg: SupportsInt) -> None: ... @@ -61,14 +61,15 @@ class Bdb: def get_file_breaks(self, filename: str) -> list[Breakpoint]: ... def get_all_breaks(self) -> list[Breakpoint]: ... def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... - def format_stack_entry(self, frame_lineno: int, lprefix: str = ...) -> str: ... - def run(self, cmd: str | CodeType, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... - def runeval(self, expr: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... + def format_stack_entry(self, frame_lineno: int, lprefix: str = ": ") -> str: ... + def run( + self, cmd: str | CodeType, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None + ) -> None: ... + def runeval(self, expr: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... class Breakpoint: - next: int bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] @@ -84,7 +85,7 @@ class Breakpoint: hits: int number: int def __init__( - self, file: str, line: int, temporary: bool = ..., cond: str | None = ..., funcname: str | None = ... + self, file: str, line: int, temporary: bool = False, cond: str | None = None, funcname: str | None = None ) -> None: ... if sys.version_info >= (3, 11): @staticmethod @@ -93,7 +94,7 @@ class Breakpoint: def deleteMe(self) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... - def bpprint(self, out: IO[str] | None = ...) -> None: ... + def bpprint(self, out: IO[str] | None = None) -> None: ... def bpformat(self) -> str: ... def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... diff --git a/mypy/typeshed/stdlib/binascii.pyi b/mypy/typeshed/stdlib/binascii.pyi index 6f834f7868c3..759b6c39399a 100644 --- a/mypy/typeshed/stdlib/binascii.pyi +++ b/mypy/typeshed/stdlib/binascii.pyi @@ -7,17 +7,17 @@ from typing_extensions import TypeAlias _AsciiBuffer: TypeAlias = str | ReadableBuffer def a2b_uu(__data: _AsciiBuffer) -> bytes: ... -def b2a_uu(__data: ReadableBuffer, *, backtick: bool = ...) 
-> bytes: ... +def b2a_uu(__data: ReadableBuffer, *, backtick: bool = False) -> bytes: ... if sys.version_info >= (3, 11): - def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = ...) -> bytes: ... + def a2b_base64(__data: _AsciiBuffer, *, strict_mode: bool = False) -> bytes: ... else: def a2b_base64(__data: _AsciiBuffer) -> bytes: ... -def b2a_base64(__data: ReadableBuffer, *, newline: bool = ...) -> bytes: ... -def a2b_qp(data: _AsciiBuffer, header: bool = ...) -> bytes: ... -def b2a_qp(data: ReadableBuffer, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... +def b2a_base64(__data: ReadableBuffer, *, newline: bool = True) -> bytes: ... +def a2b_qp(data: _AsciiBuffer, header: bool = False) -> bytes: ... +def b2a_qp(data: ReadableBuffer, quotetabs: bool = False, istext: bool = True, header: bool = False) -> bytes: ... if sys.version_info < (3, 11): def a2b_hqx(__data: _AsciiBuffer) -> bytes: ... @@ -26,7 +26,7 @@ if sys.version_info < (3, 11): def b2a_hqx(__data: ReadableBuffer) -> bytes: ... def crc_hqx(__data: ReadableBuffer, __crc: int) -> int: ... -def crc32(__data: ReadableBuffer, __crc: int = ...) -> int: ... +def crc32(__data: ReadableBuffer, __crc: int = 0) -> int: ... if sys.version_info >= (3, 8): # sep must be str or bytes, not bytearray or any other buffer diff --git a/mypy/typeshed/stdlib/binhex.pyi b/mypy/typeshed/stdlib/binhex.pyi index 27aa379f134d..e0993c840ce7 100644 --- a/mypy/typeshed/stdlib/binhex.pyi +++ b/mypy/typeshed/stdlib/binhex.pyi @@ -1,3 +1,4 @@ +from _typeshed import _BufferWithLen from typing import IO, Any from typing_extensions import Literal, TypeAlias @@ -10,7 +11,6 @@ LINELEN: Literal[64] RUNCHAR: Literal[b"\x90"] class FInfo: - def __init__(self) -> None: ... Type: str Creator: str Flags: int @@ -28,9 +28,9 @@ class openrsrc: class BinHex: def __init__(self, name_finfo_dlen_rlen: _FileInfoTuple, ofp: _FileHandleUnion) -> None: ... - def write(self, data: bytes) -> None: ... + def write(self, data: _BufferWithLen) -> None: ... def close_data(self) -> None: ... - def write_rsrc(self, data: bytes) -> None: ... + def write_rsrc(self, data: _BufferWithLen) -> None: ... def close(self) -> None: ... def binhex(inp: str, out: str) -> None: ... 
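With the `binascii` and `base64` defaults now written out (`crc32(..., __crc=0)`, `b64decode(..., validate=False)`), two small checks of what those values mean in practice: the CRC argument is a running value that lets checksums be chained, and `validate=False` silently drops non-alphabet characters before decoding.

```python
import base64
import binascii

data = b"hello world"

# Chaining: feeding the previous CRC in as the second argument continues the
# checksum, which is why its default of 0 is the natural starting point.
whole = binascii.crc32(data)
chained = binascii.crc32(data[6:], binascii.crc32(data[:6]))
assert whole == chained

# With the default validate=False, junk characters are discarded before decoding.
assert base64.b64decode("aGk=!!") == b"hi"
# base64.b64decode("aGk=!!", validate=True) would raise binascii.Error instead.
print(hex(whole))
```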
diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 6992dc8e2674..7b8e25084c91 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -1,17 +1,17 @@ +import _ast +import _typeshed import sys import types -from _ast import AST from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( AnyStr_co, + FileDescriptorOrPath, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, ReadableBuffer, - Self, - StrOrBytesPath, SupportsAdd, SupportsAiter, SupportsAnext, @@ -32,7 +32,7 @@ from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWra from types import CodeType, TracebackType, _Cell # mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi -from typing import ( # noqa: Y027 +from typing import ( # noqa: Y022 IO, Any, BinaryIO, @@ -50,12 +50,11 @@ from typing import ( # noqa: Y027 SupportsComplex, SupportsFloat, SupportsInt, - SupportsRound, TypeVar, overload, type_check_only, ) -from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -83,12 +82,12 @@ class object: __module__: str __annotations__: dict[str, Any] @property - def __class__(self: Self) -> type[Self]: ... + def __class__(self) -> type[Self]: ... # Ignore errors about type mismatch between property getter and setter @__class__.setter def __class__(self, __type: type[object]) -> None: ... # noqa: F811 def __init__(self) -> None: ... - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers. # Overriding them in subclasses has different semantics, even if the override has an identical signature. def __setattr__(self, __name: str, __value: Any) -> None: ... @@ -169,9 +168,11 @@ class type: @overload def __new__(cls, __o: object) -> type: ... @overload - def __new__(cls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any) -> Self: ... + def __new__( + cls: type[_typeshed.Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any + ) -> _typeshed.Self: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... - def __subclasses__(self: Self) -> list[Self]: ... + def __subclasses__(self: _typeshed.Self) -> list[_typeshed.Self]: ... # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> list[type]: ... @@ -197,9 +198,9 @@ _LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 class int: @overload - def __new__(cls: type[Self], __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... @overload - def __new__(cls: type[Self], __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... 
@@ -218,25 +219,25 @@ class int: if sys.version_info >= (3, 11): def to_bytes( - self, length: SupportsIndex = ..., byteorder: Literal["little", "big"] = ..., *, signed: bool = ... + self, length: SupportsIndex = 1, byteorder: Literal["little", "big"] = "big", *, signed: bool = False ) -> bytes: ... @classmethod def from_bytes( - cls: type[Self], + cls, bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, - byteorder: Literal["little", "big"] = ..., + byteorder: Literal["little", "big"] = "big", *, - signed: bool = ..., + signed: bool = False, ) -> Self: ... else: - def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = ...) -> bytes: ... + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = False) -> bytes: ... @classmethod def from_bytes( - cls: type[Self], + cls, bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, byteorder: Literal["little", "big"], *, - signed: bool = ..., + signed: bool = False, ) -> Self: ... def __add__(self, __x: int) -> int: ... @@ -258,18 +259,16 @@ class int: @overload def __pow__(self, __x: Literal[0], __modulo: None) -> Literal[1]: ... @overload - def __pow__(self, __x: _PositiveInteger, __modulo: None = ...) -> int: ... + def __pow__(self, __x: _PositiveInteger, __modulo: None = None) -> int: ... @overload - def __pow__(self, __x: _NegativeInteger, __modulo: None = ...) -> float: ... + def __pow__(self, __x: _NegativeInteger, __modulo: None = None) -> float: ... # positive x -> int; negative x -> float # return type must be Any as `int | float` causes too many false-positive errors @overload - def __pow__(self, __x: int, __modulo: None = ...) -> Any: ... - @overload - def __pow__(self, __x: int, __modulo: Literal[0]) -> NoReturn: ... + def __pow__(self, __x: int, __modulo: None = None) -> Any: ... @overload def __pow__(self, __x: int, __modulo: int) -> int: ... - def __rpow__(self, __x: int, __mod: int | None = ...) -> Any: ... + def __rpow__(self, __x: int, __mod: int | None = None) -> Any: ... def __and__(self, __n: int) -> int: ... def __or__(self, __n: int) -> int: ... def __xor__(self, __n: int) -> int: ... @@ -301,12 +300,12 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls: type[Self], x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def __new__(cls, __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod - def fromhex(cls: type[Self], __s: str) -> Self: ... + def fromhex(cls, __s: str) -> Self: ... @property def real(self) -> float: ... @property @@ -320,11 +319,11 @@ class float: def __mod__(self, __x: float) -> float: ... def __divmod__(self, __x: float) -> tuple[float, float]: ... @overload - def __pow__(self, __x: int, __mod: None = ...) -> float: ... + def __pow__(self, __x: int, __mod: None = None) -> float: ... # positive x -> float; negative x -> complex # return type must be Any as `float | complex` causes too many false-positive errors @overload - def __pow__(self, __x: float, __mod: None = ...) -> Any: ... + def __pow__(self, __x: float, __mod: None = None) -> Any: ... def __radd__(self, __x: float) -> float: ... def __rsub__(self, __x: float) -> float: ... def __rmul__(self, __x: float) -> float: ... @@ -333,12 +332,12 @@ class float: def __rmod__(self, __x: float) -> float: ... 
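For context (illustrative only, assuming CPython): the 3.11 branch of the `int` hunk writes out the defaults (`length=1`, `byteorder="big"`, `signed=False`) that `int.to_bytes`/`int.from_bytes` gained in Python 3.11, while older versions still require both positional arguments:

    import sys

    if sys.version_info >= (3, 11):
        assert (255).to_bytes() == b"\xff"             # length=1, byteorder="big"
        assert int.from_bytes(b"\x01\x00") == 256      # byteorder defaults to "big"
    assert (256).to_bytes(2, "little") == b"\x00\x01"  # explicit form works on all versions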
def __rdivmod__(self, __x: float) -> tuple[float, float]: ... @overload - def __rpow__(self, __x: _PositiveInteger, __modulo: None = ...) -> float: ... + def __rpow__(self, __x: _PositiveInteger, __modulo: None = None) -> float: ... @overload - def __rpow__(self, __x: _NegativeInteger, __mod: None = ...) -> complex: ... + def __rpow__(self, __x: _NegativeInteger, __mod: None = None) -> complex: ... # Returning `complex` for the general case gives too many false-positive errors. @overload - def __rpow__(self, __x: float, __mod: None = ...) -> Any: ... + def __rpow__(self, __x: float, __mod: None = None) -> Any: ... def __getnewargs__(self) -> tuple[float]: ... def __trunc__(self) -> int: ... if sys.version_info >= (3, 9): @@ -346,7 +345,7 @@ class float: def __floor__(self) -> int: ... @overload - def __round__(self, __ndigits: None = ...) -> int: ... + def __round__(self, __ndigits: None = None) -> int: ... @overload def __round__(self, __ndigits: SupportsIndex) -> float: ... def __eq__(self, __x: object) -> bool: ... @@ -367,19 +366,17 @@ class complex: # Python doesn't currently accept SupportsComplex for the second argument @overload def __new__( - cls: type[Self], + cls, real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ..., imag: complex | SupportsFloat | SupportsIndex = ..., ) -> Self: ... @overload - def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... + def __new__(cls, real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... else: @overload - def __new__( - cls: type[Self], real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ... - ) -> Self: ... + def __new__(cls, real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ...) -> Self: ... @overload - def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... + def __new__(cls, real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... @property def real(self) -> float: ... @@ -389,12 +386,12 @@ class complex: def __add__(self, __x: complex) -> complex: ... def __sub__(self, __x: complex) -> complex: ... def __mul__(self, __x: complex) -> complex: ... - def __pow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __pow__(self, __x: complex, __mod: None = None) -> complex: ... def __truediv__(self, __x: complex) -> complex: ... def __radd__(self, __x: complex) -> complex: ... def __rsub__(self, __x: complex) -> complex: ... def __rmul__(self, __x: complex) -> complex: ... - def __rpow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __rpow__(self, __x: complex, __mod: None = None) -> complex: ... def __rtruediv__(self, __x: complex) -> complex: ... def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... @@ -408,26 +405,29 @@ class complex: class _FormatMapMapping(Protocol): def __getitem__(self, __key: str) -> Any: ... +class _TranslateTable(Protocol): + def __getitem__(self, __key: int) -> str | int | None: ... + class str(Sequence[str]): @overload - def __new__(cls: type[Self], object: object = ...) -> Self: ... + def __new__(cls, object: object = ...) -> Self: ... @overload - def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... - def capitalize(self) -> str: ... - def casefold(self) -> str: ... - def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... 
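The comments on `float.__pow__`/`__rpow__` above explain why the general float-to-float overloads return `Any`: a negative base raised to a non-integral exponent yields `complex` at runtime, so neither `float` nor `float | complex` is a usable return type. A quick illustration (not part of the patch):

    assert isinstance(2.0 ** 3, float)          # positive base, int exponent -> float
    assert isinstance((-8.0) ** 0.5, complex)   # negative base, fractional exponent -> complex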
+ def __new__(cls, object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + def capitalize(self) -> str: ... # type: ignore[misc] + def casefold(self) -> str: ... # type: ignore[misc] + def center(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... + def encode(self, encoding: str = "utf-8", errors: str = "strict") -> bytes: ... def endswith( self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> str: ... # type: ignore[misc] else: - def expandtabs(self, tabsize: int = ...) -> str: ... + def expandtabs(self, tabsize: int = 8) -> str: ... # type: ignore[misc] def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def format(self, *args: object, **kwargs: object) -> str: ... + def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] def format_map(self, map: _FormatMapMapping) -> str: ... def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def isalnum(self) -> bool: ... @@ -442,74 +442,72 @@ class str(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... - def join(self, __iterable: Iterable[str]) -> str: ... - def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... - def lower(self) -> str: ... - def lstrip(self, __chars: str | None = ...) -> str: ... - def partition(self, __sep: str) -> tuple[str, str, str]: ... - def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... + def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + def ljust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... # type: ignore[misc] + def lower(self) -> str: ... # type: ignore[misc] + def lstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + def replace(self, __old: str, __new: str, __count: SupportsIndex = -1) -> str: ... # type: ignore[misc] if sys.version_info >= (3, 9): - def removeprefix(self, __prefix: str) -> str: ... - def removesuffix(self, __suffix: str) -> str: ... + def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... - def rpartition(self, __sep: str) -> tuple[str, str, str]: ... - def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... - def rstrip(self, __chars: str | None = ...) -> str: ... - def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... - def splitlines(self, keepends: bool = ...) -> list[str]: ... + def rjust(self, __width: SupportsIndex, __fillchar: str = " ") -> str: ... 
# type: ignore[misc] + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + def rsplit(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + def rstrip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + def split(self, sep: str | None = None, maxsplit: SupportsIndex = -1) -> list[str]: ... # type: ignore[misc] + def splitlines(self, keepends: bool = False) -> list[str]: ... # type: ignore[misc] def startswith( self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... - def strip(self, __chars: str | None = ...) -> str: ... - def swapcase(self) -> str: ... - def title(self) -> str: ... - def translate(self, __table: Mapping[int, int | str | None] | Sequence[int | str | None]) -> str: ... - def upper(self) -> str: ... - def zfill(self, __width: SupportsIndex) -> str: ... + def strip(self, __chars: str | None = None) -> str: ... # type: ignore[misc] + def swapcase(self) -> str: ... # type: ignore[misc] + def title(self) -> str: ... # type: ignore[misc] + def translate(self, __table: _TranslateTable) -> str: ... + def upper(self) -> str: ... # type: ignore[misc] + def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] @staticmethod @overload def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... @staticmethod @overload - def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... - def __add__(self, __s: str) -> str: ... + def maketrans(__x: str, __y: str) -> dict[int, int]: ... + @staticmethod + @overload + def maketrans(__x: str, __y: str, __z: str) -> dict[int, int | None]: ... + def __add__(self, __s: str) -> str: ... # type: ignore[misc] # Incompatible with Sequence.__contains__ def __contains__(self, __o: str) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ge__(self, __x: str) -> bool: ... def __getitem__(self, __i: SupportsIndex | slice) -> str: ... def __gt__(self, __x: str) -> bool: ... - def __hash__(self) -> int: ... - def __iter__(self) -> Iterator[str]: ... + def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... def __lt__(self, __x: str) -> bool: ... - def __mod__(self, __x: Any) -> str: ... - def __mul__(self, __n: SupportsIndex) -> str: ... + def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] + def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __ne__(self, __x: object) -> bool: ... - def __rmul__(self, __n: SupportsIndex) -> str: ... + def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] def __getnewargs__(self) -> tuple[str]: ... class bytes(ByteString): @overload - def __new__(cls: type[Self], __ints: Iterable[SupportsIndex]) -> Self: ... + def __new__(cls, __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... @overload - def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... + def __new__(cls, __string: str, encoding: str, errors: str = ...) -> Self: ... @overload - def __new__(cls: type[Self], __length: SupportsIndex) -> Self: ... - @overload - def __new__(cls: type[Self]) -> Self: ... - @overload - def __new__(cls: type[Self], __o: SupportsBytes) -> Self: ... + def __new__(cls) -> Self: ... def capitalize(self) -> bytes: ... 
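The new `_TranslateTable` protocol and the three-way split of `str.maketrans` mirror the runtime behaviour: `translate` accepts anything indexable by code point, and the two-argument form of `maketrans` never maps to `None`. A short, standalone illustration:

    table = str.maketrans("abc", "xyz")           # two-argument form: dict[int, int]
    assert "cab".translate(table) == "zxy"

    drop_vowels = str.maketrans("", "", "aeiou")  # three-argument form may map to None
    assert "banana".translate(drop_vowels) == "bnn"

    # Any mapping from code points works, matching the _TranslateTable protocol.
    assert "a-b".translate({ord("-"): None}) == "ab"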
- def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytes: ... def count( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... def endswith( self, __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -517,7 +515,7 @@ class bytes(ByteString): __end: SupportsIndex | None = ..., ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> bytes: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytes: ... else: def expandtabs(self, tabsize: int = ...) -> bytes: ... @@ -541,11 +539,11 @@ class bytes(ByteString): def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytes: ... - def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... def lower(self) -> bytes: ... - def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... def partition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... - def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytes: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytes: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: ReadableBuffer) -> bytes: ... def removesuffix(self, __suffix: ReadableBuffer) -> bytes: ... @@ -556,26 +554,26 @@ class bytes(ByteString): def rindex( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytes: ... def rpartition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... - def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... - def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... - def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... - def splitlines(self, keepends: bool = ...) -> list[bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytes]: ... + def splitlines(self, keepends: bool = False) -> list[bytes]: ... def startswith( self, __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ..., ) -> bool: ... - def strip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytes: ... def swapcase(self) -> bytes: ... def title(self) -> bytes: ... - def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytes: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytes: ... 
def upper(self) -> bytes: ... def zfill(self, __width: SupportsIndex) -> bytes: ... @classmethod - def fromhex(cls: type[Self], __s: str) -> Self: ... + def fromhex(cls, __s: str) -> Self: ... @staticmethod def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... def __len__(self) -> int: ... @@ -589,7 +587,7 @@ class bytes(ByteString): def __rmul__(self, __n: SupportsIndex) -> bytes: ... def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, __o: SupportsIndex | bytes) -> bool: ... # type: ignore[override] + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... def __lt__(self, __x: bytes) -> bool: ... @@ -604,19 +602,17 @@ class bytearray(MutableSequence[int], ByteString): @overload def __init__(self) -> None: ... @overload - def __init__(self, __ints: Iterable[SupportsIndex]) -> None: ... + def __init__(self, __ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer) -> None: ... @overload def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... - @overload - def __init__(self, __length: SupportsIndex) -> None: ... def append(self, __item: SupportsIndex) -> None: ... def capitalize(self) -> bytearray: ... - def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = b" ") -> bytearray: ... def count( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def copy(self) -> bytearray: ... - def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: ... def endswith( self, __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], @@ -624,7 +620,7 @@ class bytearray(MutableSequence[int], ByteString): __end: SupportsIndex | None = ..., ) -> bool: ... if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> bytearray: ... + def expandtabs(self, tabsize: SupportsIndex = 8) -> bytearray: ... else: def expandtabs(self, tabsize: int = ...) -> bytearray: ... @@ -650,43 +646,43 @@ class bytearray(MutableSequence[int], ByteString): def istitle(self) -> bool: ... def isupper(self) -> bool: ... def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytearray: ... - def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... def lower(self) -> bytearray: ... - def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def lstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... def partition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... - def pop(self, __index: int = ...) -> int: ... + def pop(self, __index: int = -1) -> int: ... def remove(self, __value: int) -> None: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: ReadableBuffer) -> bytearray: ... def removesuffix(self, __suffix: ReadableBuffer) -> bytearray: ... - def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytearray: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = -1) -> bytearray: ... 
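The separate `bytes.__new__`/`bytearray.__init__` overloads for an integer length, an iterable of ints, and a buffer are collapsed into a single union overload in this file; at runtime all of these forms already go through the same constructor. Illustrative only:

    assert bytes(3) == b"\x00\x00\x00"        # SupportsIndex: zero-filled length
    assert bytes([72, 105]) == b"Hi"          # Iterable[SupportsIndex]
    assert bytes(bytearray(b"Hi")) == b"Hi"   # ReadableBuffer
    assert bytearray(b"Hi") == bytearray([72, 105])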
def rfind( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def rindex( self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... - def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = b" ") -> bytearray: ... def rpartition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... - def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... - def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... - def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... - def splitlines(self, keepends: bool = ...) -> list[bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def rstrip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = None, maxsplit: SupportsIndex = -1) -> list[bytearray]: ... + def splitlines(self, keepends: bool = False) -> list[bytearray]: ... def startswith( self, __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ..., ) -> bool: ... - def strip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def strip(self, __bytes: ReadableBuffer | None = None) -> bytearray: ... def swapcase(self) -> bytearray: ... def title(self) -> bytearray: ... - def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytearray: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = b"") -> bytearray: ... def upper(self) -> bytearray: ... def zfill(self, __width: SupportsIndex) -> bytearray: ... @classmethod - def fromhex(cls: type[Self], __string: str) -> Self: ... + def fromhex(cls, __string: str) -> Self: ... @staticmethod def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... def __len__(self) -> int: ... @@ -703,23 +699,23 @@ class bytearray(MutableSequence[int], ByteString): def __delitem__(self, __i: SupportsIndex | slice) -> None: ... def __add__(self, __s: ReadableBuffer) -> bytearray: ... # The superclass wants us to accept Iterable[int], but that fails at runtime. - def __iadd__(self: Self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] + def __iadd__(self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] def __mul__(self, __n: SupportsIndex) -> bytearray: ... def __rmul__(self, __n: SupportsIndex) -> bytearray: ... - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] def __eq__(self, __x: object) -> bool: ... def __ne__(self, __x: object) -> bool: ... - def __lt__(self, __x: bytes) -> bool: ... - def __le__(self, __x: bytes) -> bool: ... - def __gt__(self, __x: bytes) -> bool: ... - def __ge__(self, __x: bytes) -> bool: ... + def __lt__(self, __x: ReadableBuffer) -> bool: ... + def __le__(self, __x: ReadableBuffer) -> bool: ... + def __gt__(self, __x: ReadableBuffer) -> bool: ... + def __ge__(self, __x: ReadableBuffer) -> bool: ... def __alloc__(self) -> int: ... 
@final -class memoryview(Sized, Sequence[int]): +class memoryview(Sequence[int]): @property def format(self) -> str: ... @property @@ -735,7 +731,7 @@ class memoryview(Sized, Sequence[int]): @property def ndim(self) -> int: ... @property - def obj(self) -> bytes | bytearray: ... + def obj(self) -> ReadableBuffer: ... @property def c_contiguous(self) -> bool: ... @property @@ -745,7 +741,7 @@ class memoryview(Sized, Sequence[int]): @property def nbytes(self) -> int: ... def __init__(self, obj: ReadableBuffer) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None ) -> None: ... @@ -761,8 +757,10 @@ class memoryview(Sized, Sequence[int]): def __setitem__(self, __s: slice, __o: ReadableBuffer) -> None: ... @overload def __setitem__(self, __i: SupportsIndex, __o: SupportsIndex) -> None: ... - if sys.version_info >= (3, 8): - def tobytes(self, order: Literal["C", "F", "A"] | None = ...) -> bytes: ... + if sys.version_info >= (3, 10): + def tobytes(self, order: Literal["C", "F", "A"] | None = "C") -> bytes: ... + elif sys.version_info >= (3, 8): + def tobytes(self, order: Literal["C", "F", "A"] | None = None) -> bytes: ... else: def tobytes(self) -> bytes: ... @@ -778,7 +776,7 @@ class memoryview(Sized, Sequence[int]): @final class bool(int): - def __new__(cls: type[Self], __o: object = ...) -> Self: ... + def __new__(cls, __o: object = ...) -> Self: ... # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload @@ -823,7 +821,7 @@ class slice: def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): - def __new__(cls: type[Self], __iterable: Iterable[_T_co] = ...) -> Self: ... + def __new__(cls, __iterable: Iterable[_T_co] = ...) -> Self: ... def __len__(self) -> int: ... def __contains__(self, __x: object) -> bool: ... @overload @@ -842,7 +840,7 @@ class tuple(Sequence[_T_co], Generic[_T_co]): def __mul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... def __rmul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... def count(self, __value: Any) -> int: ... - def index(self, __value: Any, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, __value: Any, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -868,7 +866,7 @@ class function: __module__: str # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. - def __get__(self, obj: object | None, type: type | None = ...) -> Any: ... + def __get__(self, obj: object, type: type | None = ...) -> Any: ... class list(MutableSequence[_T], Generic[_T]): @overload @@ -878,10 +876,10 @@ class list(MutableSequence[_T], Generic[_T]): def copy(self) -> list[_T]: ... def append(self, __object: _T) -> None: ... def extend(self, __iterable: Iterable[_T]) -> None: ... - def pop(self, __index: SupportsIndex = ...) -> _T: ... + def pop(self, __index: SupportsIndex = -1) -> _T: ... 
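In the memoryview hunk, `.obj` is widened from `bytes | bytearray` to `ReadableBuffer`, since a view can wrap any buffer-providing object, and the explicit `Sized` base is dropped, presumably because `Sequence` already supplies `__len__`. A small sketch of the runtime behaviour this tracks:

    import array

    buf = array.array("B", [1, 2, 3])
    view = memoryview(buf)
    assert view.obj is buf          # .obj may be an array, mmap, etc., not just bytes/bytearray
    assert len(view) == 3           # __len__ comes from Sequence[int]
    with view:                      # __enter__ returns the view itself (Self)
        assert view.tobytes() == b"\x01\x02\x03"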
# Signature of `list.index` should be kept in line with `collections.UserList.index()` # and multiprocessing.managers.ListProxy.index() - def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, __value: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... def count(self, __value: _T) -> int: ... def insert(self, __index: SupportsIndex, __object: _T) -> None: ... def remove(self, __value: _T) -> None: ... @@ -891,9 +889,9 @@ class list(MutableSequence[_T], Generic[_T]): # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: list[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: list[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... @overload - def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... __hash__: ClassVar[None] # type: ignore[assignment] @@ -911,10 +909,10 @@ class list(MutableSequence[_T], Generic[_T]): def __add__(self, __x: list[_T]) -> list[_T]: ... @overload def __add__(self, __x: list[_S]) -> list[_S | _T]: ... - def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] def __mul__(self, __n: SupportsIndex) -> list[_T]: ... def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __imul__(self, __n: SupportsIndex) -> Self: ... def __contains__(self, __o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... def __gt__(self, __x: list[_T]) -> bool: ... @@ -943,7 +941,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error @overload def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... - def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: ... def keys(self) -> dict_keys[_KT, _VT]: ... def values(self) -> dict_values[_KT, _VT]: ... @@ -953,7 +951,7 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, __iterable: Iterable[_T], __value: None = ...) -> dict[_T, Any | None]: ... + def fromkeys(cls, __iterable: Iterable[_T], __value: None = None) -> dict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... @@ -967,9 +965,9 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): @overload def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... def __len__(self) -> int: ... - def __getitem__(self, __k: _KT) -> _VT: ... - def __setitem__(self, __k: _KT, __v: _VT) -> None: ... - def __delitem__(self, __v: _KT) -> None: ... + def __getitem__(self, __key: _KT) -> _VT: ... + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT]: ... 
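The `dict.fromkeys` overloads above spell out the default: with no value argument every key maps to `None`, while an explicit value fixes the value type. For instance (illustrative only):

    assert dict.fromkeys("ab") == {"a": None, "b": None}   # value defaults to None
    assert dict.fromkeys("ab", 0) == {"a": 0, "b": 0}       # inferred as dict[str, int]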
@@ -980,9 +978,9 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... # dict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... class set(MutableSet[_T], Generic[_T]): @overload @@ -1008,13 +1006,13 @@ class set(MutableSet[_T], Generic[_T]): def __contains__(self, __o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... def __and__(self, __s: AbstractSet[object]) -> set[_T]: ... - def __iand__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __iand__(self, __s: AbstractSet[object]) -> Self: ... def __or__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ior__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __ior__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __sub__(self, __s: AbstractSet[_T | None]) -> set[_T]: ... - def __isub__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __isub__(self, __s: AbstractSet[object]) -> Self: ... def __xor__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... - def __ixor__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __ixor__(self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] def __le__(self, __s: AbstractSet[object]) -> bool: ... def __lt__(self, __s: AbstractSet[object]) -> bool: ... def __ge__(self, __s: AbstractSet[object]) -> bool: ... @@ -1025,9 +1023,9 @@ class set(MutableSet[_T], Generic[_T]): class frozenset(AbstractSet[_T_co], Generic[_T_co]): @overload - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... @overload - def __new__(cls: type[Self], __iterable: Iterable[_T_co]) -> Self: ... + def __new__(cls, __iterable: Iterable[_T_co]) -> Self: ... def copy(self) -> frozenset[_T_co]: ... def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... @@ -1052,7 +1050,7 @@ class frozenset(AbstractSet[_T_co], Generic[_T_co]): class enumerate(Iterator[tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[int, _T]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -1103,7 +1101,7 @@ class property: class _NotImplementedType(Any): # type: ignore[misc] # A little weird, but typing the __call__ as NotImplemented makes the error message # for NotImplemented() much better - __call__: NotImplemented # type: ignore[valid-type] + __call__: NotImplemented # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] NotImplemented: _NotImplementedType @@ -1135,27 +1133,90 @@ if sys.version_info >= (3, 10): @overload async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ... -# TODO: `compile` has a more precise return type in reality; work on a way of expressing that? +# compile() returns a CodeType, unless the flags argument includes PyCF_ONLY_AST (=1024), +# in which case it returns ast.AST. 
We have overloads for flag 0 (the default) and for +# explicitly passing PyCF_ONLY_AST. We fall back to Any for other values of flags. if sys.version_info >= (3, 8): + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[0], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> CodeType: ... + @overload def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, - flags: int = ..., - dont_inherit: int = ..., - optimize: int = ..., *, - _feature_version: int = ..., + dont_inherit: bool = False, + optimize: int = -1, + _feature_version: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, + ) -> _ast.AST: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int, + dont_inherit: bool = False, + optimize: int = -1, + *, + _feature_version: int = -1, ) -> Any: ... else: + @overload def compile( - source: str | ReadableBuffer | AST, + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, filename: str | ReadableBuffer | _PathLike[Any], mode: str, - flags: int = ..., - dont_inherit: int = ..., - optimize: int = ..., + flags: Literal[0], + dont_inherit: bool = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + *, + dont_inherit: bool = False, + optimize: int = -1, + ) -> CodeType: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: Literal[1024], + dont_inherit: bool = False, + optimize: int = -1, + ) -> _ast.AST: ... + @overload + def compile( + source: str | ReadableBuffer | _ast.Module | _ast.Expression | _ast.Interactive, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int, + dont_inherit: bool = False, + optimize: int = -1, ) -> Any: ... def copyright() -> None: ... @@ -1170,27 +1231,29 @@ def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ... # The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. # (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) def eval( - __source: str | ReadableBuffer | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, object] | None = ... + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, ) -> Any: ... 
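Following the comment in the hunk above, the new overloads let a type checker distinguish the two common cases of `compile`: a plain call returns a code object, while passing `ast.PyCF_ONLY_AST` (value 1024) returns an AST node; any other flag combination falls back to `Any`. A runnable illustration, not part of the patch:

    import ast
    from types import CodeType

    code = compile("1 + 1", "<string>", "eval")                     # flags omitted -> CodeType
    assert isinstance(code, CodeType)

    tree = compile("1 + 1", "<string>", "eval", ast.PyCF_ONLY_AST)  # flags == 1024 -> ast.AST
    assert isinstance(tree, ast.Expression)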
# Comment above regarding `eval` applies to `exec` as well if sys.version_info >= (3, 11): def exec( __source: str | ReadableBuffer | CodeType, - __globals: dict[str, Any] | None = ..., - __locals: Mapping[str, object] | None = ..., + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, *, - closure: tuple[_Cell, ...] | None = ..., + closure: tuple[_Cell, ...] | None = None, ) -> None: ... else: def exec( __source: str | ReadableBuffer | CodeType, - __globals: dict[str, Any] | None = ..., - __locals: Mapping[str, object] | None = ..., + __globals: dict[str, Any] | None = None, + __locals: Mapping[str, object] | None = None, ) -> None: ... -def exit(code: object = ...) -> NoReturn: ... +def exit(code: sys._ExitCode = None) -> NoReturn: ... class filter(Iterator[_T], Generic[_T]): @overload @@ -1199,10 +1262,10 @@ class filter(Iterator[_T], Generic[_T]): def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... @overload def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... -def format(__value: object, __format_spec: str = ...) -> str: ... +def format(__value: object, __format_spec: str = "") -> str: ... @overload def getattr(__o: object, __name: str) -> Any: ... @@ -1225,7 +1288,7 @@ def hash(__obj: object) -> int: ... def help(request: object = ...) -> None: ... def hex(__number: int | SupportsIndex) -> str: ... def id(__obj: object) -> int: ... -def input(__prompt: object = ...) -> str: ... +def input(__prompt: object = "") -> str: ... class _GetItemIterable(Protocol[_T_co]): def __getitem__(self, __i: int) -> _T_co: ... @@ -1239,19 +1302,13 @@ def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: @overload def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... -# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): - def isinstance( - __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - def issubclass( - __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] - ) -> bool: ... - + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] else: - def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... - def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... def locals() -> dict[str, Any]: ... @@ -1296,35 +1353,35 @@ class map(Iterator[_S], Generic[_S]): __iter6: Iterable[Any], *iterables: Iterable[Any], ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _S: ... @overload def max( - __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: ... 
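The recursive `_ClassInfo` alias replaces the previously hand-unrolled tuple types, so arbitrarily nested tuples of classes (and, on 3.10+, `X | Y` unions via `types.UnionType`) type-check as the second argument to `isinstance`/`issubclass`, matching what the interpreter accepts. For example:

    assert isinstance(1, (str, (bytes, (int, float))))   # nested tuples are fine at any depth
    assert issubclass(bool, (str, (int,)))
    # On Python 3.10+ a union also works at runtime:
    # isinstance(1, int | str)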
@overload def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... @overload def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... @overload def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload def min( - __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = None ) -> SupportsRichComparisonT: ... @overload def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None) -> SupportsRichComparisonT: ... @overload def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = None, default: _T) -> SupportsRichComparisonT | _T: ... @overload def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload @@ -1333,94 +1390,93 @@ def next(__i: SupportsNext[_T]) -> _T: ... def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... def oct(__number: int | SupportsIndex) -> str: ... -_OpenFile = StrOrBytesPath | int # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed _Opener: TypeAlias = Callable[[str, int], int] # Text mode: always returns a TextIOWrapper @overload def open( - file: _OpenFile, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + file: FileDescriptorOrPath, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, buffering: Literal[0], - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> FileIO: ... 
# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedRandom: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedWriter: ... @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: OpenBinaryMode, - buffering: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> BinaryIO: ... # Fallback if mode is not specified @overload def open( - file: _OpenFile, + file: FileDescriptorOrPath, mode: str, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: _Opener | None = ..., + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, ) -> IO[Any]: ... def ord(__c: str | bytes | bytearray) -> int: ... @@ -1430,14 +1486,14 @@ class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]): @overload def print( *values: object, - sep: str | None = ..., - end: str | None = ..., - file: SupportsWrite[str] | None = ..., - flush: Literal[False] = ..., + sep: str | None = " ", + end: str | None = "\n", + file: SupportsWrite[str] | None = None, + flush: Literal[False] = False, ) -> None: ... @overload def print( - *values: object, sep: str | None = ..., end: str | None = ..., file: _SupportsWriteAndFlush[str] | None = ..., flush: bool + *values: object, sep: str | None = " ", end: str | None = "\n", file: _SupportsWriteAndFlush[str] | None = None, flush: bool ) -> None: ... _E = TypeVar("_E", contravariant=True) @@ -1447,7 +1503,7 @@ class _SupportsPow2(Protocol[_E, _T_co]): def __pow__(self, __other: _E) -> _T_co: ... class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): - def __pow__(self, __other: _E, __modulo: None = ...) -> _T_co: ... + def __pow__(self, __other: _E, __modulo: None = None) -> _T_co: ... class _SupportsPow3(Protocol[_E, _M, _T_co]): def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ... 
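The `open` overloads now write out the runtime defaults and dispatch on the mode/buffering combination, so what a checker infers matches the concrete classes CPython actually returns. A sketch (the file name is hypothetical):

    import io

    with open("data.bin", "wb") as f:                # buffered binary writing -> io.BufferedWriter
        assert isinstance(f, io.BufferedWriter)
        f.write(b"\x00")

    with open("data.bin", "rb", buffering=0) as f:   # unbuffered binary -> io.FileIO
        assert isinstance(f, io.FileIO)

    with open("data.bin") as f:                      # defaults: mode="r" -> io.TextIOWrapper
        assert isinstance(f, io.TextIOWrapper)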
@@ -1457,106 +1513,113 @@ _SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs a ) if sys.version_info >= (3, 8): - @overload - def pow(base: int, exp: int, mod: Literal[0]) -> NoReturn: ... + # TODO: `pow(int, int, Literal[0])` fails at runtime, + # but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). @overload def pow(base: int, exp: int, mod: int) -> int: ... @overload - def pow(base: int, exp: Literal[0], mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + def pow(base: int, exp: Literal[0], mod: None = None) -> Literal[1]: ... # type: ignore[misc] @overload - def pow(base: int, exp: _PositiveInteger, mod: None = ...) -> int: ... # type: ignore[misc] + def pow(base: int, exp: _PositiveInteger, mod: None = None) -> int: ... # type: ignore[misc] @overload - def pow(base: int, exp: _NegativeInteger, mod: None = ...) -> float: ... # type: ignore[misc] + def pow(base: int, exp: _NegativeInteger, mod: None = None) -> float: ... # type: ignore[misc] # int base & positive-int exp -> int; int base & negative-int exp -> float # return type must be Any as `int | float` causes too many false-positive errors @overload - def pow(base: int, exp: int, mod: None = ...) -> Any: ... + def pow(base: int, exp: int, mod: None = None) -> Any: ... @overload - def pow(base: _PositiveInteger, exp: float, mod: None = ...) -> float: ... + def pow(base: _PositiveInteger, exp: float, mod: None = None) -> float: ... @overload - def pow(base: _NegativeInteger, exp: float, mod: None = ...) -> complex: ... + def pow(base: _NegativeInteger, exp: float, mod: None = None) -> complex: ... @overload - def pow(base: float, exp: int, mod: None = ...) -> float: ... + def pow(base: float, exp: int, mod: None = None) -> float: ... # float base & float exp could return float or complex # return type must be Any (same as complex base, complex exp), # as `float | complex` causes too many false-positive errors @overload - def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> Any: ... + def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> Any: ... @overload - def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> complex: ... + def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = None) -> complex: ... @overload - def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... @overload - def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = None) -> _T_co: ... @overload - def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M = ...) -> _T_co: ... + def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... @overload - def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = ...) -> Any: ... + def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = None) -> Any: ... @overload - def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = ...) -> complex: ... + def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = None) -> complex: ... else: - @overload - def pow(__base: int, __exp: int, __mod: Literal[0]) -> NoReturn: ... @overload def pow(__base: int, __exp: int, __mod: int) -> int: ... @overload - def pow(__base: int, __exp: Literal[0], __mod: None = ...) -> Literal[1]: ... 
# type: ignore[misc] + def pow(__base: int, __exp: Literal[0], __mod: None = None) -> Literal[1]: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: _PositiveInteger, __mod: None = ...) -> int: ... # type: ignore[misc] + def pow(__base: int, __exp: _PositiveInteger, __mod: None = None) -> int: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: _NegativeInteger, __mod: None = ...) -> float: ... # type: ignore[misc] + def pow(__base: int, __exp: _NegativeInteger, __mod: None = None) -> float: ... # type: ignore[misc] @overload - def pow(__base: int, __exp: int, __mod: None = ...) -> Any: ... + def pow(__base: int, __exp: int, __mod: None = None) -> Any: ... @overload - def pow(__base: _PositiveInteger, __exp: float, __mod: None = ...) -> float: ... + def pow(__base: _PositiveInteger, __exp: float, __mod: None = None) -> float: ... @overload - def pow(__base: _NegativeInteger, __exp: float, __mod: None = ...) -> complex: ... + def pow(__base: _NegativeInteger, __exp: float, __mod: None = None) -> complex: ... @overload - def pow(__base: float, __exp: int, __mod: None = ...) -> float: ... + def pow(__base: float, __exp: int, __mod: None = None) -> float: ... @overload - def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> Any: ... + def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> Any: ... @overload - def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> complex: ... + def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = None) -> complex: ... @overload - def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... @overload - def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = None) -> _T_co: ... @overload - def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M = ...) -> _T_co: ... + def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ... @overload - def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = ...) -> Any: ... + def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = None) -> Any: ... @overload - def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = ...) -> complex: ... + def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = None) -> complex: ... -def quit(code: object = ...) -> NoReturn: ... +def quit(code: sys._ExitCode = None) -> NoReturn: ... class reversed(Iterator[_T], Generic[_T]): @overload def __init__(self, __sequence: Reversible[_T]) -> None: ... @overload def __init__(self, __sequence: SupportsLenAndGetItem[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... def __length_hint__(self) -> int: ... def repr(__obj: object) -> str: ... + +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, __ndigits: int) -> _T_co: ... + @overload -def round(number: SupportsRound[Any]) -> int: ... -@overload -def round(number: SupportsRound[Any], ndigits: None) -> int: ... 
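As the TODO in this hunk notes, the `NoReturn` overload for a literal-zero modulus was dropped, and the remaining `pow` overloads mirror the runtime behaviour: an integral modulus keeps the result an `int`, a negative integer exponent produces a `float`, and so on. Illustrative only:

    assert pow(2, 10, 1000) == 24        # int base, exp and mod -> int
    assert pow(2, -1) == 0.5             # negative exponent -> float
    assert pow(4, 0.5) == 2.0            # positive base, float exponent -> float
    # pow(2, 3, 0) still raises ValueError at runtime; it is simply no longer typed as NoReturn.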
+def round(number: _SupportsRound1[_T], ndigits: None = None) -> _T: ... @overload -def round(number: SupportsRound[_T], ndigits: SupportsIndex) -> _T: ... +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... # See https://github.com/python/typeshed/pull/6292#discussion_r748875189 # for why arg 3 of `setattr` should be annotated with `Any` and not `object` def setattr(__obj: object, __name: str, __value: Any) -> None: ... @overload def sorted( - __iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ... + __iterable: Iterable[SupportsRichComparisonT], *, key: None = None, reverse: bool = False ) -> list[SupportsRichComparisonT]: ... @overload -def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> list[_T]: ... +def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> list[_T]: ... _AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) _AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) @@ -1571,11 +1634,11 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit # Instead, we special-case the most common examples of this: bool and literal integers. if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], start: int = 0) -> int: ... # type: ignore[misc] else: @overload - def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = ...) -> int: ... # type: ignore[misc] + def sum(__iterable: Iterable[bool], __start: int = 0) -> int: ... # type: ignore[misc] @overload def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... @@ -1588,8 +1651,12 @@ else: @overload def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... -# The argument to `vars()` has to have a `__dict__` attribute, so can't be annotated with `object` +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` # (A "SupportsDunderDict" protocol doesn't work) +# Use a type: ignore to make complaints about overlapping overloads go away +@overload +def vars(__object: type) -> types.MappingProxyType[str, Any]: ... # type: ignore[misc] +@overload def vars(__object: Any = ...) -> dict[str, Any]: ... class zip(Iterator[_T_co], Generic[_T_co]): @@ -1667,17 +1734,17 @@ class zip(Iterator[_T_co], Generic[_T_co]): *iterables: Iterable[Any], ) -> zip[tuple[Any, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` # Return type of `__import__` should be kept the same as return type of `importlib.import_module` def __import__( name: str, - globals: Mapping[str, object] | None = ..., - locals: Mapping[str, object] | None = ..., + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, fromlist: Sequence[str] = ..., - level: int = ..., + level: int = 0, ) -> types.ModuleType: ... def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... @@ -1697,7 +1764,7 @@ class BaseException: __traceback__: TracebackType | None def __init__(self, *args: object) -> None: ... 
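The new `_SupportsRound1`/`_SupportsRound2` protocols replace `typing.SupportsRound` so the one- and two-argument forms of `round` can be typed independently (the linked typeshed PRs explain why). A hypothetical class, not from the patch, that satisfies both protocols:

    class Price:
        def __init__(self, cents: int) -> None:
            self.cents = cents

        def __round__(self, ndigits: int | None = None) -> "Price":
            # A real implementation would round self.cents; identity is enough for this sketch.
            return self

    assert isinstance(round(Price(199)), Price)      # matches _SupportsRound1
    assert isinstance(round(Price(199), 2), Price)   # matches _SupportsRound2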
def __setstate__(self, __state: dict[str, Any] | None) -> None: ... - def with_traceback(self: Self, __tb: TracebackType | None) -> Self: ... + def with_traceback(self, __tb: TracebackType | None) -> Self: ... if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] @@ -1707,15 +1774,13 @@ class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... class SystemExit(BaseException): - code: int + code: sys._ExitCode class Exception(BaseException): ... class StopIteration(Exception): value: Any -_StandardError = Exception - class OSError(Exception): errno: int strerror: str @@ -1730,37 +1795,38 @@ IOError = OSError if sys.platform == "win32": WindowsError = OSError -class ArithmeticError(_StandardError): ... -class AssertionError(_StandardError): ... +class ArithmeticError(Exception): ... +class AssertionError(Exception): ... -class AttributeError(_StandardError): +class AttributeError(Exception): if sys.version_info >= (3, 10): + def __init__(self, *args: object, name: str | None = ..., obj: object = ...) -> None: ... name: str obj: object -class BufferError(_StandardError): ... -class EOFError(_StandardError): ... +class BufferError(Exception): ... +class EOFError(Exception): ... -class ImportError(_StandardError): +class ImportError(Exception): def __init__(self, *args: object, name: str | None = ..., path: str | None = ...) -> None: ... name: str | None path: str | None msg: str # undocumented -class LookupError(_StandardError): ... -class MemoryError(_StandardError): ... +class LookupError(Exception): ... +class MemoryError(Exception): ... -class NameError(_StandardError): +class NameError(Exception): if sys.version_info >= (3, 10): name: str -class ReferenceError(_StandardError): ... -class RuntimeError(_StandardError): ... +class ReferenceError(Exception): ... +class RuntimeError(Exception): ... class StopAsyncIteration(Exception): value: Any -class SyntaxError(_StandardError): +class SyntaxError(Exception): msg: str lineno: int | None offset: int | None @@ -1770,9 +1836,9 @@ class SyntaxError(_StandardError): end_lineno: int | None end_offset: int | None -class SystemError(_StandardError): ... -class TypeError(_StandardError): ... -class ValueError(_StandardError): ... +class SystemError(Exception): ... +class TypeError(Exception): ... +class ValueError(Exception): ... class FloatingPointError(ArithmeticError): ... class OverflowError(ArithmeticError): ... class ZeroDivisionError(ArithmeticError): ... @@ -1849,29 +1915,48 @@ if sys.version_info >= (3, 11): _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + # See `check_exception_group.py` for use-cases and comments. class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): - def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + def __new__(cls, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> None: ... @property def message(self) -> str: ... @property def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... @overload + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload def subgroup( self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] 
) -> BaseExceptionGroup[_BaseExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> Self | None: ... + def subgroup( + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: ... @overload def split( - self: Self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] - ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, Self | None]: ... + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... @overload - def split(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... - def derive(self: Self, __excs: Sequence[_BaseExceptionT_co]) -> Self: ... + def split( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + @overload + def split( + self, __condition: Callable[[_BaseExceptionT_co | Self], bool] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT_co] | None, BaseExceptionGroup[_BaseExceptionT_co] | None]: ... + # In reality it is `NonEmptySequence`: + @overload + def derive(self, __excs: Sequence[_ExceptionT]) -> ExceptionGroup[_ExceptionT]: ... + @overload + def derive(self, __excs: Sequence[_BaseExceptionT]) -> BaseExceptionGroup[_BaseExceptionT]: ... def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): - def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + def __new__(cls, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + def __init__(self, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> None: ... @property def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... # We accept a narrower type, but that's OK. @@ -1880,10 +1965,12 @@ if sys.version_info >= (3, 11): self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] ) -> ExceptionGroup[_ExceptionT] | None: ... @overload - def subgroup(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> Self | None: ... + def subgroup(self, __condition: Callable[[_ExceptionT_co | Self], bool]) -> ExceptionGroup[_ExceptionT_co] | None: ... @overload # type: ignore[override] def split( - self: Self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> tuple[ExceptionGroup[_ExceptionT] | None, Self | None]: ... + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None]: ... @overload - def split(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... + def split( + self, __condition: Callable[[_ExceptionT_co | Self], bool] + ) -> tuple[ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None]: ... 
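To make the new `_SupportsRound1`/`_SupportsRound2` split in `builtins.pyi` above easier to review, here is a minimal sketch (not part of the patch; `Decimal` is just a convenient standard-library example) of how the two `round()` overloads are expected to behave:

```python
# Illustrative only: one-argument round() goes through __round__(self),
# two-argument round() requires __round__(self, ndigits).
from decimal import Decimal

d = Decimal("2.675")
as_int = round(d)     # Decimal.__round__(self) -> int, so this should be inferred as int
as_dec = round(d, 2)  # Decimal.__round__(self, ndigits) -> Decimal, so this stays a Decimal
assert isinstance(as_int, int) and isinstance(as_dec, Decimal)
```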
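Similarly, the reworked `subgroup`/`split` overloads for exception groups are easiest to read against a concrete use (illustrative only, requires Python 3.11+; the variable names are made up for this sketch):

```python
# Sketch of the intended narrowing, assuming a 3.11+ runtime and the stubs above.
eg: ExceptionGroup[ValueError | TypeError] = ExceptionGroup(
    "parse failures", [ValueError("bad int"), TypeError("bad field")]
)
only_values = eg.subgroup(ValueError)  # expected: ExceptionGroup[ValueError] | None
matched, rest = eg.split(ValueError)   # matched: ExceptionGroup[ValueError] | None
                                       # rest:    ExceptionGroup[ValueError | TypeError] | None
assert only_values is not None and matched is not None and rest is not None
```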
diff --git a/mypy/typeshed/stdlib/bz2.pyi b/mypy/typeshed/stdlib/bz2.pyi index cea317e28037..9ad80ee6f731 100644 --- a/mypy/typeshed/stdlib/bz2.pyi +++ b/mypy/typeshed/stdlib/bz2.pyi @@ -1,10 +1,10 @@ import _compression import sys from _compression import BaseStream -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Iterable from typing import IO, Any, Protocol, TextIO, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] @@ -19,8 +19,8 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -def compress(data: bytes, compresslevel: int = ...) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def compress(data: ReadableBuffer, compresslevel: int = 9) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... _ReadBinaryMode: TypeAlias = Literal["", "r", "rb"] _WriteBinaryMode: TypeAlias = Literal["w", "wb", "x", "xb", "a", "ab"] @@ -30,114 +30,114 @@ _WriteTextMode: TypeAlias = Literal["wt", "xt", "at"] @overload def open( filename: _ReadableFileobj, - mode: _ReadBinaryMode = ..., - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + mode: _ReadBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: _ReadableFileobj, mode: _ReadTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: _WritableFileobj, mode: _WriteBinaryMode, - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: _WritableFileobj, mode: _WriteTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath, - mode: _ReadBinaryMode | _WriteBinaryMode = ..., - compresslevel: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + mode: _ReadBinaryMode | _WriteBinaryMode = "rb", + compresslevel: int = 9, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BZ2File: ... @overload def open( filename: StrOrBytesPath, mode: _ReadTextMode | _WriteTextMode, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... 
@overload def open( filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, mode: str, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BZ2File | TextIO: ... class BZ2File(BaseStream, IO[bytes]): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... if sys.version_info >= (3, 9): @overload - def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = ...) -> None: ... + def __init__(self, filename: _WritableFileobj, mode: _WriteBinaryMode, *, compresslevel: int = 9) -> None: ... @overload - def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = ..., *, compresslevel: int = ...) -> None: ... + def __init__(self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", *, compresslevel: int = 9) -> None: ... @overload def __init__( - self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = ..., *, compresslevel: int = ... + self, filename: StrOrBytesPath, mode: _ReadBinaryMode | _WriteBinaryMode = "r", *, compresslevel: int = 9 ) -> None: ... else: @overload def __init__( - self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = ..., compresslevel: int = ... + self, filename: _WritableFileobj, mode: _WriteBinaryMode, buffering: Any | None = None, compresslevel: int = 9 ) -> None: ... @overload def __init__( - self, filename: _ReadableFileobj, mode: _ReadBinaryMode = ..., buffering: Any | None = ..., compresslevel: int = ... + self, filename: _ReadableFileobj, mode: _ReadBinaryMode = "r", buffering: Any | None = None, compresslevel: int = 9 ) -> None: ... @overload def __init__( self, filename: StrOrBytesPath, - mode: _ReadBinaryMode | _WriteBinaryMode = ..., - buffering: Any | None = ..., - compresslevel: int = ..., + mode: _ReadBinaryMode | _WriteBinaryMode = "r", + buffering: Any | None = None, + compresslevel: int = 9, ) -> None: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... - def readline(self, size: SupportsIndex = ...) -> bytes: ... # type: ignore[override] + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: SupportsIndex = -1) -> bytes: ... # type: ignore[override] def readinto(self, b: WriteableBuffer) -> int: ... - def readlines(self, size: SupportsIndex = ...) -> list[bytes]: ... - def seek(self, offset: int, whence: int = ...) -> int: ... + def readlines(self, size: SupportsIndex = -1) -> list[bytes]: ... + def seek(self, offset: int, whence: int = 0) -> int: ... def write(self, data: ReadableBuffer) -> int: ... def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... @final class BZ2Compressor: def __init__(self, compresslevel: int = ...) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... @final class BZ2Decompressor: - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property def eof(self) -> bool: ... 
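A quick illustration (not from the patch) of what widening `bz2.compress`/`decompress` from `bytes` to `ReadableBuffer` allows: any buffer-protocol object, which is what the runtime already accepted:

```python
import bz2

# bytes-like inputs such as memoryview and bytearray should now type-check.
blob = bz2.compress(memoryview(b"hello world"), compresslevel=9)
assert bz2.decompress(bytearray(blob)) == b"hello world"
```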
@property diff --git a/mypy/typeshed/stdlib/cProfile.pyi b/mypy/typeshed/stdlib/cProfile.pyi index 6e21fc92ade5..8945b21427ab 100644 --- a/mypy/typeshed/stdlib/cProfile.pyi +++ b/mypy/typeshed/stdlib/cProfile.pyi @@ -1,15 +1,15 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrOrBytesPath, Unused from collections.abc import Callable from types import CodeType from typing import Any, TypeVar -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = ..., sort: str | int = ... + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: ... _T = TypeVar("_T") @@ -23,15 +23,15 @@ class Profile: ) -> None: ... def enable(self) -> None: ... def disable(self) -> None: ... - def print_stats(self, sort: str | int = ...) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... - def run(self: Self, cmd: str) -> Self: ... - def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... if sys.version_info >= (3, 8): - def __enter__(self: Self) -> Self: ... - def __exit__(self, *exc_info: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *exc_info: Unused) -> None: ... def label(code: str | CodeType) -> _Label: ... # undocumented diff --git a/mypy/typeshed/stdlib/calendar.pyi b/mypy/typeshed/stdlib/calendar.pyi index 4faee805333b..255a12d3348a 100644 --- a/mypy/typeshed/stdlib/calendar.pyi +++ b/mypy/typeshed/stdlib/calendar.pyi @@ -1,7 +1,9 @@ import datetime import sys +from _typeshed import Unused from collections.abc import Iterable, Sequence from time import struct_time +from typing import ClassVar from typing_extensions import Literal, TypeAlias __all__ = [ @@ -49,7 +51,7 @@ def monthrange(year: int, month: int) -> tuple[int, int]: ... class Calendar: firstweekday: int - def __init__(self, firstweekday: int = ...) -> None: ... + def __init__(self, firstweekday: int = 0) -> None: ... def getfirstweekday(self) -> int: ... def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: ... @@ -59,9 +61,9 @@ class Calendar: def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... - def yeardatescalendar(self, year: int, width: int = ...) -> list[list[int]]: ... - def yeardays2calendar(self, year: int, width: int = ...) -> list[list[tuple[int, int]]]: ... - def yeardayscalendar(self, year: int, width: int = ...) -> list[list[int]]: ... + def yeardatescalendar(self, year: int, width: int = 3) -> list[list[int]]: ... 
+ def yeardays2calendar(self, year: int, width: int = 3) -> list[list[tuple[int, int]]]: ... + def yeardayscalendar(self, year: int, width: int = 3) -> list[list[int]]: ... def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... @@ -71,59 +73,59 @@ class TextCalendar(Calendar): def formatweek(self, theweek: int, width: int) -> str: ... def formatweekday(self, day: int, width: int) -> str: ... def formatweekheader(self, width: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... - def prmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... - def formatmonth(self, theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... - def formatyear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... - def pryear(self, theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... + def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = True) -> str: ... + def prmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... + def formatmonth(self, theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... + def formatyear(self, theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... + def pryear(self, theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... def firstweekday() -> int: ... def monthcalendar(year: int, month: int) -> list[list[int]]: ... def prweek(theweek: int, width: int) -> None: ... def week(theweek: int, width: int) -> str: ... def weekheader(width: int) -> str: ... -def prmonth(theyear: int, themonth: int, w: int = ..., l: int = ...) -> None: ... -def month(theyear: int, themonth: int, w: int = ..., l: int = ...) -> str: ... -def calendar(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> str: ... -def prcal(theyear: int, w: int = ..., l: int = ..., c: int = ..., m: int = ...) -> None: ... +def prmonth(theyear: int, themonth: int, w: int = 0, l: int = 0) -> None: ... +def month(theyear: int, themonth: int, w: int = 0, l: int = 0) -> str: ... +def calendar(theyear: int, w: int = 2, l: int = 1, c: int = 6, m: int = 3) -> str: ... +def prcal(theyear: int, w: int = 0, l: int = 0, c: int = 6, m: int = 3) -> None: ... class HTMLCalendar(Calendar): + cssclasses: ClassVar[list[str]] + cssclass_noday: ClassVar[str] + cssclasses_weekday_head: ClassVar[list[str]] + cssclass_month_head: ClassVar[str] + cssclass_month: ClassVar[str] + cssclass_year: ClassVar[str] + cssclass_year_head: ClassVar[str] def formatday(self, day: int, weekday: int) -> str: ... def formatweek(self, theweek: int) -> str: ... def formatweekday(self, day: int) -> str: ... def formatweekheader(self) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... - def formatmonth(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... - def formatyear(self, theyear: int, width: int = ...) -> str: ... - def formatyearpage(self, theyear: int, width: int = ..., css: str | None = ..., encoding: str | None = ...) -> str: ... - cssclasses: list[str] - cssclass_noday: str - cssclasses_weekday_head: list[str] - cssclass_month_head: str - cssclass_month: str - cssclass_year: str - cssclass_year_head: str + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... 
+ def formatmonth(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... + def formatyear(self, theyear: int, width: int = 3) -> str: ... + def formatyearpage( + self, theyear: int, width: int = 3, css: str | None = "calendar.css", encoding: str | None = None + ) -> str: ... class different_locale: def __init__(self, locale: _LocaleType) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... class LocaleTextCalendar(TextCalendar): - def __init__(self, firstweekday: int = ..., locale: _LocaleType | None = ...) -> None: ... - def formatweekday(self, day: int, width: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, width: int, withyear: bool = ...) -> str: ... + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... class LocaleHTMLCalendar(HTMLCalendar): - def __init__(self, firstweekday: int = ..., locale: _LocaleType | None = ...) -> None: ... + def __init__(self, firstweekday: int = 0, locale: _LocaleType | None = None) -> None: ... def formatweekday(self, day: int) -> str: ... - def formatmonthname(self, theyear: int, themonth: int, withyear: bool = ...) -> str: ... + def formatmonthname(self, theyear: int, themonth: int, withyear: bool = True) -> str: ... c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... -def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... -def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... +def format(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... +def formatstring(cols: int, colwidth: int = 20, spacing: int = 6) -> str: ... def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... # Data attributes diff --git a/mypy/typeshed/stdlib/cgi.pyi b/mypy/typeshed/stdlib/cgi.pyi index 523b44793941..a2acfa92d463 100644 --- a/mypy/typeshed/stdlib/cgi.pyi +++ b/mypy/typeshed/stdlib/cgi.pyi @@ -1,9 +1,11 @@ import sys -from _typeshed import Self, SupportsGetItem, SupportsItemAccess +from _typeshed import SupportsGetItem, SupportsItemAccess, Unused from builtins import list as _list, type as _type from collections.abc import Iterable, Iterator, Mapping +from email.message import Message from types import TracebackType from typing import IO, Any, Protocol +from typing_extensions import Self __all__ = [ "MiniFieldStorage", @@ -24,11 +26,11 @@ if sys.version_info < (3, 8): __all__ += ["parse_qs", "parse_qsl", "escape"] def parse( - fp: IO[Any] | None = ..., + fp: IO[Any] | None = None, environ: SupportsItemAccess[str, str] = ..., keep_blank_values: bool = ..., strict_parsing: bool = ..., - separator: str = ..., + separator: str = "&", ) -> dict[str, list[str]]: ... if sys.version_info < (3, 8): @@ -36,7 +38,7 @@ if sys.version_info < (3, 8): def parse_qsl(qs: str, keep_blank_values: bool = ..., strict_parsing: bool = ...) -> list[tuple[str, str]]: ... def parse_multipart( - fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = ..., errors: str = ..., separator: str = ... + fp: IO[Any], pdict: SupportsGetItem[str, bytes], encoding: str = "utf-8", errors: str = "replace", separator: str = "&" ) -> dict[str, list[Any]]: ... class _Environ(Protocol): @@ -51,7 +53,7 @@ def print_directory() -> None: ... def print_environ_usage() -> None: ... if sys.version_info < (3, 8): - def escape(s: str, quote: bool | None = ...) -> str: ... + def escape(s: str, quote: bool | None = None) -> str: ... 
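For context on the `ClassVar` annotations added to `HTMLCalendar` above: the documented customisation point is a class-level override, roughly like this sketch (the subclass name and CSS strings are invented for illustration):

```python
import calendar

class HighlightWeekends(calendar.HTMLCalendar):
    # Class-level override of the ClassVar defaults declared in the stub.
    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat weekend", "sun weekend"]

html = HighlightWeekends().formatmonth(2024, 2)
assert "weekend" in html
```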
class MiniFieldStorage: # The first five "Any" attributes here are always None, but mypy doesn't support that @@ -72,7 +74,7 @@ class FieldStorage: keep_blank_values: int strict_parsing: int qs_on_post: str | None - headers: Mapping[str, str] + headers: Mapping[str, str] | Message fp: IO[bytes] encoding: str errors: str @@ -92,24 +94,24 @@ class FieldStorage: value: None | bytes | _list[Any] def __init__( self, - fp: IO[Any] | None = ..., - headers: Mapping[str, str] | None = ..., - outerboundary: bytes = ..., + fp: IO[Any] | None = None, + headers: Mapping[str, str] | Message | None = None, + outerboundary: bytes = b"", environ: SupportsGetItem[str, str] = ..., - keep_blank_values: int = ..., - strict_parsing: int = ..., - limit: int | None = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: int = 0, + strict_parsing: int = 0, + limit: int | None = None, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def __iter__(self) -> Iterator[str]: ... def __getitem__(self, key: str) -> Any: ... - def getvalue(self, key: str, default: Any = ...) -> Any: ... - def getfirst(self, key: str, default: Any = ...) -> Any: ... + def getvalue(self, key: str, default: Any = None) -> Any: ... + def getfirst(self, key: str, default: Any = None) -> Any: ... def getlist(self, key: str) -> _list[Any]: ... def keys(self) -> _list[str]: ... def __contains__(self, key: str) -> bool: ... @@ -119,9 +121,9 @@ class FieldStorage: def make_file(self) -> IO[Any]: ... def print_exception( - type: type[BaseException] | None = ..., - value: BaseException | None = ..., - tb: TracebackType | None = ..., - limit: int | None = ..., + type: type[BaseException] | None = None, + value: BaseException | None = None, + tb: TracebackType | None = None, + limit: int | None = None, ) -> None: ... def print_arguments() -> None: ... diff --git a/mypy/typeshed/stdlib/cgitb.pyi b/mypy/typeshed/stdlib/cgitb.pyi index ea5a8341bc5e..4c315bf6ca39 100644 --- a/mypy/typeshed/stdlib/cgitb.pyi +++ b/mypy/typeshed/stdlib/cgitb.pyi @@ -2,8 +2,9 @@ from _typeshed import OptExcInfo, StrOrBytesPath from collections.abc import Callable from types import FrameType, TracebackType from typing import IO, Any +from typing_extensions import Final -__UNDEF__: object # undocumented sentinel +__UNDEF__: Final[object] # undocumented sentinel def reset() -> str: ... # undocumented def small(text: str) -> str: ... # undocumented @@ -13,20 +14,20 @@ def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | N def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] ) -> list[tuple[str, str | None, Any]]: ... # undocumented -def html(einfo: OptExcInfo, context: int = ...) -> str: ... -def text(einfo: OptExcInfo, context: int = ...) -> str: ... +def html(einfo: OptExcInfo, context: int = 5) -> str: ... +def text(einfo: OptExcInfo, context: int = 5) -> str: ... class Hook: # undocumented def __init__( self, - display: int = ..., - logdir: StrOrBytesPath | None = ..., - context: int = ..., - file: IO[str] | None = ..., - format: str = ..., + display: int = 1, + logdir: StrOrBytesPath | None = None, + context: int = 5, + file: IO[str] | None = None, + format: str = "html", ) -> None: ... 
def __call__(self, etype: type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... - def handle(self, info: OptExcInfo | None = ...) -> None: ... + def handle(self, info: OptExcInfo | None = None) -> None: ... -def handler(info: OptExcInfo | None = ...) -> None: ... -def enable(display: int = ..., logdir: StrOrBytesPath | None = ..., context: int = ..., format: str = ...) -> None: ... +def handler(info: OptExcInfo | None = None) -> None: ... +def enable(display: int = 1, logdir: StrOrBytesPath | None = None, context: int = 5, format: str = "html") -> None: ... diff --git a/mypy/typeshed/stdlib/chunk.pyi b/mypy/typeshed/stdlib/chunk.pyi index 50ff267c5436..9788d35f680c 100644 --- a/mypy/typeshed/stdlib/chunk.pyi +++ b/mypy/typeshed/stdlib/chunk.pyi @@ -9,12 +9,12 @@ class Chunk: size_read: int offset: int seekable: bool - def __init__(self, file: IO[bytes], align: bool = ..., bigendian: bool = ..., inclheader: bool = ...) -> None: ... + def __init__(self, file: IO[bytes], align: bool = True, bigendian: bool = True, inclheader: bool = False) -> None: ... def getname(self) -> bytes: ... def getsize(self) -> int: ... def close(self) -> None: ... def isatty(self) -> bool: ... - def seek(self, pos: int, whence: int = ...) -> None: ... + def seek(self, pos: int, whence: int = 0) -> None: ... def tell(self) -> int: ... - def read(self, size: int = ...) -> bytes: ... + def read(self, size: int = -1) -> bytes: ... def skip(self) -> None: ... diff --git a/mypy/typeshed/stdlib/cmath.pyi b/mypy/typeshed/stdlib/cmath.pyi index 30ada5d5b5ef..0a85600e99b7 100644 --- a/mypy/typeshed/stdlib/cmath.pyi +++ b/mypy/typeshed/stdlib/cmath.pyi @@ -27,7 +27,7 @@ def atanh(__z: _C) -> complex: ... def cos(__z: _C) -> complex: ... def cosh(__z: _C) -> complex: ... def exp(__z: _C) -> complex: ... -def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = ..., abs_tol: SupportsFloat = ...) -> bool: ... +def isclose(a: _C, b: _C, *, rel_tol: SupportsFloat = 1e-09, abs_tol: SupportsFloat = 0.0) -> bool: ... def isinf(__z: _C) -> bool: ... def isnan(__z: _C) -> bool: ... def log(__x: _C, __y_obj: _C = ...) -> complex: ... diff --git a/mypy/typeshed/stdlib/cmd.pyi b/mypy/typeshed/stdlib/cmd.pyi index ddefff2edf05..b658a873410b 100644 --- a/mypy/typeshed/stdlib/cmd.pyi +++ b/mypy/typeshed/stdlib/cmd.pyi @@ -23,9 +23,9 @@ class Cmd: stdout: IO[str] cmdqueue: list[str] completekey: str - def __init__(self, completekey: str = ..., stdin: IO[str] | None = ..., stdout: IO[str] | None = ...) -> None: ... + def __init__(self, completekey: str = "tab", stdin: IO[str] | None = None, stdout: IO[str] | None = None) -> None: ... old_completer: Callable[[str, int], str | None] | None - def cmdloop(self, intro: Any | None = ...) -> None: ... + def cmdloop(self, intro: Any | None = None) -> None: ... def precmd(self, line: str) -> str: ... def postcmd(self, stop: bool, line: str) -> bool: ... def preloop(self) -> None: ... @@ -43,4 +43,4 @@ class Cmd: def complete_help(self, *args: Any) -> list[str]: ... def do_help(self, arg: str) -> bool | None: ... def print_topics(self, header: str, cmds: list[str] | None, cmdlen: Any, maxcol: int) -> None: ... - def columnize(self, list: list[str] | None, displaywidth: int = ...) -> None: ... + def columnize(self, list: list[str] | None, displaywidth: int = 80) -> None: ... 
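Since `cmath.isclose` above now spells out its defaults, here is a short reminder of what `rel_tol=1e-09, abs_tol=0.0` means in practice (illustrative values only):

```python
import cmath

# The relative tolerance scales with the magnitude of the inputs...
assert cmath.isclose(1e9 + 1j, 1e9 + 0.1 + 1j)   # 0.1 <= 1e-9 * 1e9
# ...but abs_tol defaults to 0.0, so comparisons against zero need it set explicitly.
assert not cmath.isclose(0.0, 1e-12)
assert cmath.isclose(0.0, 1e-12, abs_tol=1e-9)
```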
diff --git a/mypy/typeshed/stdlib/code.pyi b/mypy/typeshed/stdlib/code.pyi index 59318aa353e2..4715bd866ddc 100644 --- a/mypy/typeshed/stdlib/code.pyi +++ b/mypy/typeshed/stdlib/code.pyi @@ -8,26 +8,26 @@ __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_ class InteractiveInterpreter: locals: Mapping[str, Any] # undocumented compile: CommandCompiler # undocumented - def __init__(self, locals: Mapping[str, Any] | None = ...) -> None: ... - def runsource(self, source: str, filename: str = ..., symbol: str = ...) -> bool: ... + def __init__(self, locals: Mapping[str, Any] | None = None) -> None: ... + def runsource(self, source: str, filename: str = "", symbol: str = "single") -> bool: ... def runcode(self, code: CodeType) -> None: ... - def showsyntaxerror(self, filename: str | None = ...) -> None: ... + def showsyntaxerror(self, filename: str | None = None) -> None: ... def showtraceback(self) -> None: ... def write(self, data: str) -> None: ... class InteractiveConsole(InteractiveInterpreter): buffer: list[str] # undocumented filename: str # undocumented - def __init__(self, locals: Mapping[str, Any] | None = ..., filename: str = ...) -> None: ... - def interact(self, banner: str | None = ..., exitmsg: str | None = ...) -> None: ... + def __init__(self, locals: Mapping[str, Any] | None = None, filename: str = "") -> None: ... + def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None: ... def push(self, line: str) -> bool: ... def resetbuffer(self) -> None: ... - def raw_input(self, prompt: str = ...) -> str: ... + def raw_input(self, prompt: str = "") -> str: ... def interact( - banner: str | None = ..., - readfunc: Callable[[str], str] | None = ..., - local: Mapping[str, Any] | None = ..., - exitmsg: str | None = ..., + banner: str | None = None, + readfunc: Callable[[str], str] | None = None, + local: Mapping[str, Any] | None = None, + exitmsg: str | None = None, ) -> None: ... -def compile_command(source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ... +def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index a7b60e38df11..5a22853b6aee 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -1,11 +1,11 @@ +import sys import types -from _typeshed import Self +from _codecs import * +from _typeshed import ReadableBuffer from abc import abstractmethod from collections.abc import Callable, Generator, Iterable from typing import Any, BinaryIO, Protocol, TextIO -from typing_extensions import Literal - -from _codecs import * +from typing_extensions import Literal, Self __all__ = [ "register", @@ -110,16 +110,16 @@ class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): def incrementaldecoder(self) -> _IncrementalDecoder: ... 
name: str def __new__( - cls: type[Self], + cls, encode: _Encoder, decode: _Decoder, - streamreader: _StreamReader | None = ..., - streamwriter: _StreamWriter | None = ..., - incrementalencoder: _IncrementalEncoder | None = ..., - incrementaldecoder: _IncrementalDecoder | None = ..., - name: str | None = ..., + streamreader: _StreamReader | None = None, + streamwriter: _StreamWriter | None = None, + incrementalencoder: _IncrementalEncoder | None = None, + incrementaldecoder: _IncrementalDecoder | None = None, + name: str | None = None, *, - _is_text_encoding: bool | None = ..., + _is_text_encoding: bool | None = None, ) -> Self: ... def getencoder(encoding: str) -> _Encoder: ... @@ -128,12 +128,20 @@ def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... def getreader(encoding: str) -> _StreamReader: ... def getwriter(encoding: str) -> _StreamWriter: ... -def open( - filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., buffering: int = ... -) -> StreamReaderWriter: ... -def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = ..., errors: str = ...) -> StreamRecoder: ... -def iterencode(iterator: Iterable[str], encoding: str, errors: str = ...) -> Generator[bytes, None, None]: ... -def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = ...) -> Generator[str, None, None]: ... + +if sys.version_info >= (3, 8): + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = -1 + ) -> StreamReaderWriter: ... + +else: + def open( + filename: str, mode: str = "r", encoding: str | None = None, errors: str = "strict", buffering: int = 1 + ) -> StreamReaderWriter: ... + +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = None, errors: str = "strict") -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = "strict") -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = "strict") -> Generator[str, None, None]: ... BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` BOM_BE: Literal[b"\xfe\xff"] @@ -156,14 +164,14 @@ def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... class Codec: # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. - def encode(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... - def decode(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... + def encode(self, input: str, errors: str = "strict") -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = "strict") -> tuple[str, int]: ... class IncrementalEncoder: errors: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def encode(self, input: str, final: bool = ...) -> bytes: ... + def encode(self, input: str, final: bool = False) -> bytes: ... def reset(self) -> None: ... # documentation says int but str is needed for the subclass. def getstate(self) -> int | str: ... @@ -171,9 +179,9 @@ class IncrementalEncoder: class IncrementalDecoder: errors: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def decode(self, input: bytes, final: bool = ...) -> str: ... 
+ def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... def reset(self) -> None: ... def getstate(self) -> tuple[bytes, int]: ... def setstate(self, state: tuple[bytes, int]) -> None: ... @@ -181,42 +189,42 @@ class IncrementalDecoder: # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): buffer: str - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def _buffer_encode(self, input: str, errors: str, final: bool) -> bytes: ... - def encode(self, input: str, final: bool = ...) -> bytes: ... + def _buffer_encode(self, input: str, errors: str, final: bool) -> tuple[bytes, int]: ... + def encode(self, input: str, final: bool = False) -> bytes: ... class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes - def __init__(self, errors: str = ...) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... @abstractmethod - def _buffer_decode(self, input: bytes, errors: str, final: bool) -> tuple[str, int]: ... - def decode(self, input: bytes, final: bool = ...) -> str: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = False) -> str: ... # TODO: it is not possible to specify the requirement that all other # attributes and methods are passed-through from the stream. class StreamWriter(Codec): stream: _WritableStream errors: str - def __init__(self, stream: _WritableStream, errors: str = ...) -> None: ... + def __init__(self, stream: _WritableStream, errors: str = "strict") -> None: ... def write(self, object: str) -> None: ... def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... class StreamReader(Codec): stream: _ReadableStream errors: str - def __init__(self, stream: _ReadableStream, errors: str = ...) -> None: ... - def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> str: ... - def readline(self, size: int | None = ..., keepends: bool = ...) -> str: ... - def readlines(self, sizehint: int | None = ..., keepends: bool = ...) -> list[str]: ... + def __init__(self, stream: _ReadableStream, errors: str = "strict") -> None: ... + def read(self, size: int = -1, chars: int = -1, firstline: bool = False) -> str: ... + def readline(self, size: int | None = None, keepends: bool = True) -> str: ... + def readlines(self, sizehint: int | None = None, keepends: bool = True) -> list[str]: ... def reset(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> str: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... @@ -224,17 +232,17 @@ class StreamReader(Codec): # and delegates attributes to the underlying binary stream with __getattr__. class StreamReaderWriter(TextIO): stream: _Stream - def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) 
-> None: ... - def read(self, size: int = ...) -> str: ... - def readline(self, size: int | None = ...) -> str: ... - def readlines(self, sizehint: int | None = ...) -> list[str]: ... + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = "strict") -> None: ... + def read(self, size: int = -1) -> str: ... + def readline(self, size: int | None = None) -> str: ... + def readlines(self, sizehint: int | None = None) -> list[str]: ... def __next__(self) -> str: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... - def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] - def __enter__(self: Self) -> Self: ... + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... # These methods don't actually exist directly, but they are needed to satisfy the TextIO @@ -251,20 +259,26 @@ class StreamReaderWriter(TextIO): class StreamRecoder(BinaryIO): def __init__( - self, stream: _Stream, encode: _Encoder, decode: _Decoder, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ... + self, + stream: _Stream, + encode: _Encoder, + decode: _Decoder, + Reader: _StreamReader, + Writer: _StreamWriter, + errors: str = "strict", ) -> None: ... - def read(self, size: int = ...) -> bytes: ... - def readline(self, size: int | None = ...) -> bytes: ... - def readlines(self, sizehint: int | None = ...) -> list[bytes]: ... + def read(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = None) -> bytes: ... + def readlines(self, sizehint: int | None = None) -> list[bytes]: ... def __next__(self) -> bytes: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def write(self, data: bytes) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[bytes]) -> None: ... def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... - def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] + def seek(self, offset: int, whence: int = 0) -> None: ... # type: ignore[override] # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO # interface. At runtime, they are delegated through __getattr__. def close(self) -> None: ... diff --git a/mypy/typeshed/stdlib/codeop.pyi b/mypy/typeshed/stdlib/codeop.pyi index 1c00e13fd501..6a51b7786384 100644 --- a/mypy/typeshed/stdlib/codeop.pyi +++ b/mypy/typeshed/stdlib/codeop.pyi @@ -2,14 +2,12 @@ from types import CodeType __all__ = ["compile_command", "Compile", "CommandCompiler"] -def compile_command(source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ... +def compile_command(source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... class Compile: flags: int - def __init__(self) -> None: ... def __call__(self, source: str, filename: str, symbol: str) -> CodeType: ... 
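To make the `codecs` changes above concrete, a small sketch (not part of the patch) of the `ReadableBuffer` parameter on `IncrementalDecoder.decode`; the incremental UTF-8 decoder already accepts buffer objects at runtime:

```python
import codecs

data = "héllo".encode("utf-8")
dec = codecs.getincrementaldecoder("utf-8")()
first = dec.decode(memoryview(data[:2]), final=False)  # "h"; the dangling UTF-8 byte is buffered
rest = dec.decode(data[2:], final=True)                # "éllo"
assert first + rest == "héllo"
```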
class CommandCompiler: compiler: Compile - def __init__(self) -> None: ... - def __call__(self, source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ... + def __call__(self, source: str, filename: str = "", symbol: str = "single") -> CodeType | None: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 40cf999dfae1..893a289d3cb1 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -1,14 +1,26 @@ import sys from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT from typing import Any, Generic, NoReturn, TypeVar, overload -from typing_extensions import SupportsIndex, final +from typing_extensions import Self, SupportsIndex, final if sys.version_info >= (3, 9): from types import GenericAlias if sys.version_info >= (3, 10): - from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Reversible, Sequence + from collections.abc import ( + Callable, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + MutableSequence, + Reversible, + Sequence, + ValuesView, + ) else: from _collections_abc import * @@ -28,18 +40,18 @@ def namedtuple( typename: str, field_names: str | Iterable[str], *, - rename: bool = ..., - module: str | None = ..., - defaults: Iterable[Any] | None = ..., + rename: bool = False, + module: str | None = None, + defaults: Iterable[Any] | None = None, ) -> type[tuple[Any, ...]]: ... class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): data: dict[_KT, _VT] # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics @overload - def __init__(self, __dict: None = ...) -> None: ... + def __init__(self, __dict: None = None) -> None: ... @overload - def __init__(self: UserDict[str, _VT], __dict: None = ..., **kwargs: _VT) -> None: ... + def __init__(self: UserDict[str, _VT], __dict: None = None, **kwargs: _VT) -> None: ... @overload def __init__(self, __dict: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... @overload @@ -56,15 +68,15 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __delitem__(self, key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __contains__(self, key: object) -> bool: ... - def copy(self: Self) -> Self: ... - def __copy__(self: Self) -> Self: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> UserDict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... @@ -73,14 +85,14 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... 
# type: ignore[misc] # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class UserList(MutableSequence[_T]): data: list[_T] @overload - def __init__(self, initlist: None = ...) -> None: ... + def __init__(self, initlist: None = None) -> None: ... @overload def __init__(self, initlist: Iterable[_T]) -> None: ... def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... @@ -93,32 +105,32 @@ class UserList(MutableSequence[_T]): @overload def __getitem__(self, i: SupportsIndex) -> _T: ... @overload - def __getitem__(self: Self, i: slice) -> Self: ... + def __getitem__(self, i: slice) -> Self: ... @overload def __setitem__(self, i: SupportsIndex, item: _T) -> None: ... @overload def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ... def __delitem__(self, i: SupportsIndex | slice) -> None: ... - def __add__(self: Self, other: Iterable[_T]) -> Self: ... - def __radd__(self: Self, other: Iterable[_T]) -> Self: ... - def __iadd__(self: Self, other: Iterable[_T]) -> Self: ... - def __mul__(self: Self, n: int) -> Self: ... - def __rmul__(self: Self, n: int) -> Self: ... - def __imul__(self: Self, n: int) -> Self: ... + def __add__(self, other: Iterable[_T]) -> Self: ... + def __radd__(self, other: Iterable[_T]) -> Self: ... + def __iadd__(self, other: Iterable[_T]) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __imul__(self, n: int) -> Self: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... - def pop(self, i: int = ...) -> _T: ... + def pop(self, i: int = -1) -> _T: ... def remove(self, item: _T) -> None: ... - def copy(self: Self) -> Self: ... - def __copy__(self: Self) -> Self: ... + def copy(self) -> Self: ... + def __copy__(self) -> Self: ... def count(self, item: _T) -> int: ... # All arguments are passed to `list.index` at runtime, so the signature should be kept in line with `list.index`. - def index(self, item: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def index(self, item: _T, __start: SupportsIndex = 0, __stop: SupportsIndex = sys.maxsize) -> int: ... # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. @overload - def sort(self: UserList[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False) -> None: ... @overload - def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False) -> None: ... def extend(self, other: Iterable[_T]) -> None: ... class UserString(Sequence[UserString]): @@ -135,34 +147,34 @@ class UserString(Sequence[UserString]): def __eq__(self, string: object) -> bool: ... def __contains__(self, char: object) -> bool: ... def __len__(self) -> int: ... - def __getitem__(self: Self, index: SupportsIndex | slice) -> Self: ... - def __iter__(self: Self) -> Iterator[Self]: ... - def __reversed__(self: Self) -> Iterator[Self]: ... - def __add__(self: Self, other: object) -> Self: ... 
- def __radd__(self: Self, other: object) -> Self: ... - def __mul__(self: Self, n: int) -> Self: ... - def __rmul__(self: Self, n: int) -> Self: ... - def __mod__(self: Self, args: Any) -> Self: ... + def __getitem__(self, index: SupportsIndex | slice) -> Self: ... + def __iter__(self) -> Iterator[Self]: ... + def __reversed__(self) -> Iterator[Self]: ... + def __add__(self, other: object) -> Self: ... + def __radd__(self, other: object) -> Self: ... + def __mul__(self, n: int) -> Self: ... + def __rmul__(self, n: int) -> Self: ... + def __mod__(self, args: Any) -> Self: ... if sys.version_info >= (3, 8): - def __rmod__(self: Self, template: object) -> Self: ... + def __rmod__(self, template: object) -> Self: ... else: - def __rmod__(self: Self, format: Any) -> Self: ... + def __rmod__(self, format: Any) -> Self: ... - def capitalize(self: Self) -> Self: ... - def casefold(self: Self) -> Self: ... - def center(self: Self, width: int, *args: Any) -> Self: ... - def count(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def capitalize(self) -> Self: ... + def casefold(self) -> Self: ... + def center(self, width: int, *args: Any) -> Self: ... + def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... if sys.version_info >= (3, 8): - def encode(self: UserString, encoding: str | None = ..., errors: str | None = ...) -> bytes: ... + def encode(self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict") -> bytes: ... else: - def encode(self: Self, encoding: str | None = ..., errors: str | None = ...) -> Self: ... + def encode(self, encoding: str | None = None, errors: str | None = None) -> Self: ... - def endswith(self, suffix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def expandtabs(self: Self, tabsize: int = ...) -> Self: ... - def find(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def endswith(self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def expandtabs(self, tabsize: int = 8) -> Self: ... + def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... def format_map(self, mapping: Mapping[str, Any]) -> str: ... - def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... + def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ... def isalpha(self) -> bool: ... def isalnum(self) -> bool: ... def isdecimal(self) -> bool: ... @@ -176,68 +188,63 @@ class UserString(Sequence[UserString]): def isupper(self) -> bool: ... def isascii(self) -> bool: ... def join(self, seq: Iterable[str]) -> str: ... - def ljust(self: Self, width: int, *args: Any) -> Self: ... - def lower(self: Self) -> Self: ... - def lstrip(self: Self, chars: str | None = ...) -> Self: ... - @staticmethod - @overload - def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... - @staticmethod - @overload - def maketrans(x: str, y: str, z: str = ...) -> dict[int, int | None]: ... + def ljust(self, width: int, *args: Any) -> Self: ... + def lower(self) -> Self: ... + def lstrip(self, chars: str | None = None) -> Self: ... + maketrans = str.maketrans def partition(self, sep: str) -> tuple[str, str, str]: ... if sys.version_info >= (3, 9): - def removeprefix(self: Self, __prefix: str | UserString) -> Self: ... 
- def removesuffix(self: Self, __suffix: str | UserString) -> Self: ... + def removeprefix(self, __prefix: str | UserString) -> Self: ... + def removesuffix(self, __suffix: str | UserString) -> Self: ... - def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = ...) -> Self: ... - def rfind(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... - def rindex(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... - def rjust(self: Self, width: int, *args: Any) -> Self: ... + def replace(self, old: str | UserString, new: str | UserString, maxsplit: int = -1) -> Self: ... + def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ... + def rjust(self, width: int, *args: Any) -> Self: ... def rpartition(self, sep: str) -> tuple[str, str, str]: ... - def rstrip(self: Self, chars: str | None = ...) -> Self: ... - def split(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... - def rsplit(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... - def splitlines(self, keepends: bool = ...) -> list[str]: ... - def startswith(self, prefix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def strip(self: Self, chars: str | None = ...) -> Self: ... - def swapcase(self: Self) -> Self: ... - def title(self: Self) -> Self: ... - def translate(self: Self, *args: Any) -> Self: ... - def upper(self: Self) -> Self: ... - def zfill(self: Self, width: int) -> Self: ... + def rstrip(self, chars: str | None = None) -> Self: ... + def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ... + def splitlines(self, keepends: bool = False) -> list[str]: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize) -> bool: ... + def strip(self, chars: str | None = None) -> Self: ... + def swapcase(self) -> Self: ... + def title(self) -> Self: ... + def translate(self, *args: Any) -> Self: ... + def upper(self) -> Self: ... + def zfill(self, width: int) -> Self: ... class deque(MutableSequence[_T], Generic[_T]): @property def maxlen(self) -> int | None: ... @overload - def __init__(self, *, maxlen: int | None = ...) -> None: ... + def __init__(self, *, maxlen: int | None = None) -> None: ... @overload - def __init__(self, iterable: Iterable[_T], maxlen: int | None = ...) -> None: ... + def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ... def append(self, __x: _T) -> None: ... def appendleft(self, __x: _T) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def count(self, __x: _T) -> int: ... def extend(self, __iterable: Iterable[_T]) -> None: ... def extendleft(self, __iterable: Iterable[_T]) -> None: ... def insert(self, __i: int, __x: _T) -> None: ... - def index(self, __x: _T, __start: int = ..., __stop: int = ...) -> int: ... + def index(self, __x: _T, __start: int = 0, __stop: int = ...) -> int: ... def pop(self) -> _T: ... # type: ignore[override] def popleft(self) -> _T: ... def remove(self, __value: _T) -> None: ... - def rotate(self, __n: int = ...) -> None: ... - def __copy__(self: Self) -> Self: ... + def rotate(self, __n: int = 1) -> None: ... + def __copy__(self) -> Self: ... def __len__(self) -> int: ... 
# These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores def __getitem__(self, __index: SupportsIndex) -> _T: ... # type: ignore[override] def __setitem__(self, __i: SupportsIndex, __x: _T) -> None: ... # type: ignore[override] def __delitem__(self, __i: SupportsIndex) -> None: ... # type: ignore[override] def __contains__(self, __o: object) -> bool: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... - def __iadd__(self: Self, __iterable: Iterable[_T]) -> Self: ... - def __add__(self: Self, __other: Self) -> Self: ... - def __mul__(self: Self, __other: int) -> Self: ... - def __imul__(self: Self, __other: int) -> Self: ... + def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self, __iterable: Iterable[_T]) -> Self: ... + def __add__(self, __other: Self) -> Self: ... + def __mul__(self, __other: int) -> Self: ... + def __imul__(self, __other: int) -> Self: ... def __lt__(self, __other: deque[_T]) -> bool: ... def __le__(self, __other: deque[_T]) -> bool: ... def __gt__(self, __other: deque[_T]) -> bool: ... @@ -247,20 +254,20 @@ class deque(MutableSequence[_T], Generic[_T]): class Counter(dict[_T, int], Generic[_T]): @overload - def __init__(self, __iterable: None = ...) -> None: ... + def __init__(self, __iterable: None = None) -> None: ... @overload - def __init__(self: Counter[str], __iterable: None = ..., **kwargs: int) -> None: ... + def __init__(self: Counter[str], __iterable: None = None, **kwargs: int) -> None: ... @overload def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... @overload def __init__(self, __iterable: Iterable[_T]) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def elements(self) -> Iterator[_T]: ... - def most_common(self, n: int | None = ...) -> list[tuple[_T, int]]: ... + def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ... @classmethod - def fromkeys(cls, iterable: Any, v: int | None = ...) -> NoReturn: ... # type: ignore[override] + def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override] @overload - def subtract(self, __iterable: None = ...) -> None: ... + def subtract(self, __iterable: None = None) -> None: ... @overload def subtract(self, __mapping: Mapping[_T, int]) -> None: ... @overload @@ -274,9 +281,9 @@ class Counter(dict[_T, int], Generic[_T]): @overload # type: ignore[override] def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... @overload - def update(self, __m: Iterable[_T], **kwargs: int) -> None: ... + def update(self, __iterable: Iterable[_T], **kwargs: int) -> None: ... @overload - def update(self, __m: None = ..., **kwargs: int) -> None: ... + def update(self, __iterable: None = None, **kwargs: int) -> None: ... def __missing__(self, key: _T) -> int: ... def __delitem__(self, elem: object) -> None: ... if sys.version_info >= (3, 10): @@ -290,10 +297,10 @@ class Counter(dict[_T, int], Generic[_T]): def __pos__(self) -> Counter[_T]: ... def __neg__(self) -> Counter[_T]: ... # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. - def __iadd__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[misc] - def __isub__(self: Self, other: Counter[_T]) -> Self: ... - def __iand__(self: Self, other: Counter[_T]) -> Self: ... - def __ior__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] + def __iadd__(self, other: Counter[_T]) -> Self: ... 
# type: ignore[misc] + def __isub__(self, other: Counter[_T]) -> Self: ... + def __iand__(self, other: Counter[_T]) -> Self: ... + def __ior__(self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] if sys.version_info >= (3, 10): def total(self) -> int: ... def __le__(self, other: Counter[Any]) -> bool: ... @@ -301,38 +308,53 @@ class Counter(dict[_T, int], Generic[_T]): def __ge__(self, other: Counter[Any]) -> bool: ... def __gt__(self, other: Counter[Any]) -> bool: ... +# The pure-Python implementations of the "views" classes +# These are exposed at runtime in `collections/__init__.py` +class _OrderedDictKeysView(KeysView[_KT_co], Reversible[_KT_co]): + def __reversed__(self) -> Iterator[_KT_co]: ... + +class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): + def __reversed__(self) -> Iterator[_VT_co]: ... + +# The C implementations of the "views" classes +# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, +# but they are not exposed anywhere) +# pyright doesn't have a specific error code for subclassing error! @final -class _OrderedDictKeysView(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_KT_co]: ... @final -class _OrderedDictItemsView(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... @final -class _OrderedDictValuesView(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore def __reversed__(self) -> Iterator[_VT_co]: ... class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): - def popitem(self, last: bool = ...) -> tuple[_KT, _VT]: ... - def move_to_end(self, key: _KT, last: bool = ...) -> None: ... - def copy(self: Self) -> Self: ... + def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = True) -> None: ... + def copy(self) -> Self: ... def __reversed__(self) -> Iterator[_KT]: ... - def keys(self) -> _OrderedDictKeysView[_KT, _VT]: ... - def items(self) -> _OrderedDictItemsView[_KT, _VT]: ... - def values(self) -> _OrderedDictValuesView[_KT, _VT]: ... + def keys(self) -> _odict_keys[_KT, _VT]: ... + def items(self) -> _odict_items[_KT, _VT]: ... + def values(self) -> _odict_values[_KT, _VT]: ... # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences. # Like dict.fromkeys, its true signature is not expressible in the current type system. # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> OrderedDict[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> OrderedDict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... 
# Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def setdefault(self: OrderedDict[_KT, _T | None], key: _KT) -> _T | None: ... + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... @overload def setdefault(self, key: _KT, default: _VT) -> _VT: ... @@ -365,15 +387,15 @@ class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): **kwargs: _VT, ) -> None: ... def __missing__(self, __key: _KT) -> _VT: ... - def __copy__(self: Self) -> Self: ... - def copy(self: Self) -> Self: ... + def __copy__(self) -> Self: ... + def copy(self) -> Self: ... class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): maps: list[MutableMapping[_KT, _VT]] def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... - def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = ...) -> Self: ... + def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ... @property - def parents(self: Self) -> Self: ... + def parents(self) -> Self: ... def __setitem__(self, key: _KT, value: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... @@ -382,17 +404,21 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __contains__(self, key: object) -> bool: ... def __missing__(self, key: _KT) -> _VT: ... # undocumented def __bool__(self) -> bool: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + # Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: ChainMap[_KT, _T | None], key: _KT, default: None = None) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @overload - def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... - def copy(self: Self) -> Self: ... + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def copy(self) -> Self: ... __copy__ = copy # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. @classmethod @overload - def fromkeys(cls, iterable: Iterable[_T], __value: None = ...) -> ChainMap[_T, Any | None]: ... + def fromkeys(cls, iterable: Iterable[_T], __value: None = None) -> ChainMap[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... @@ -401,6 +427,6 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... diff --git a/mypy/typeshed/stdlib/compileall.pyi b/mypy/typeshed/stdlib/compileall.pyi index dd1de3f496e7..7520c2f5b676 100644 --- a/mypy/typeshed/stdlib/compileall.pyi +++ b/mypy/typeshed/stdlib/compileall.pyi @@ -8,70 +8,104 @@ __all__ = ["compile_dir", "compile_file", "compile_path"] class _SupportsSearch(Protocol): def search(self, string: str) -> Any: ... 
-if sys.version_info >= (3, 9): +if sys.version_info >= (3, 10): def compile_dir( dir: StrPath, - maxlevels: int | None = ..., - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - workers: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... def compile_file( fullname: StrPath, - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, *, - stripdir: str | None = ..., # TODO: change to StrPath | None once https://bugs.python.org/issue40447 is resolved - prependdir: StrPath | None = ..., - limit_sl_dest: StrPath | None = ..., - hardlink_dupes: bool = ..., + stripdir: StrPath | None = None, + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... + +elif sys.version_info >= (3, 9): + def compile_dir( + dir: StrPath, + maxlevels: int | None = None, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, + ) -> int: ... + def compile_file( + fullname: StrPath, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + *, + stripdir: str | None = None, # https://bugs.python.org/issue40447 + prependdir: StrPath | None = None, + limit_sl_dest: StrPath | None = None, + hardlink_dupes: bool = False, ) -> int: ... else: def compile_dir( dir: StrPath, - maxlevels: int = ..., - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - workers: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int = 10, + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + workers: int = 1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... 
def compile_file( fullname: StrPath, - ddir: StrPath | None = ..., - force: bool = ..., - rx: _SupportsSearch | None = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + ddir: StrPath | None = None, + force: bool = False, + rx: _SupportsSearch | None = None, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... def compile_path( skip_curdir: bool = ..., - maxlevels: int = ..., - force: bool = ..., - quiet: int = ..., - legacy: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + maxlevels: int = 0, + force: bool = False, + quiet: int = 0, + legacy: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> int: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi index 3885abf8db91..e792cf1a83c0 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/_base.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/_base.pyi @@ -1,11 +1,11 @@ import sys import threading -from _typeshed import Self +from _typeshed import Unused from collections.abc import Callable, Iterable, Iterator, Sequence from logging import Logger from types import TracebackType from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, ParamSpec, SupportsIndex +from typing_extensions import Literal, ParamSpec, Self, SupportsIndex if sys.version_info >= (3, 9): from types import GenericAlias @@ -35,16 +35,15 @@ _T = TypeVar("_T") _P = ParamSpec("_P") class Future(Generic[_T]): - def __init__(self) -> None: ... def cancel(self) -> bool: ... def cancelled(self) -> bool: ... def running(self) -> bool: ... def done(self) -> bool: ... def add_done_callback(self, fn: Callable[[Future[_T]], object]) -> None: ... - def result(self, timeout: float | None = ...) -> _T: ... + def result(self, timeout: float | None = None) -> _T: ... def set_running_or_notify_cancel(self) -> bool: ... def set_result(self, result: _T) -> None: ... - def exception(self, timeout: float | None = ...) -> BaseException | None: ... + def exception(self, timeout: float | None = None) -> BaseException | None: ... def set_exception(self, exception: BaseException | None) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -56,19 +55,19 @@ class Executor: def submit(self, fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... def map( - self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = ..., chunksize: int = ... + self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = None, chunksize: int = 1 ) -> Iterator[_T]: ... if sys.version_info >= (3, 9): - def shutdown(self, wait: bool = ..., *, cancel_futures: bool = ...) -> None: ... + def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: ... else: - def shutdown(self, wait: bool = ...) -> None: ... + def shutdown(self, wait: bool = True) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... -def as_completed(fs: Iterable[Future[_T]], timeout: float | None = ...) -> Iterator[Future[_T]]: ... 
+def as_completed(fs: Iterable[Future[_T]], timeout: float | None = None) -> Iterator[Future[_T]]: ... # Ideally this would be a namedtuple, but mypy doesn't support generic tuple types. See #1976 class DoneAndNotDoneFutures(Sequence[set[Future[_T]]]): @@ -85,39 +84,30 @@ class DoneAndNotDoneFutures(Sequence[set[Future[_T]]]): @overload def __getitem__(self, __s: slice) -> DoneAndNotDoneFutures[_T]: ... -def wait(fs: Iterable[Future[_T]], timeout: float | None = ..., return_when: str = ...) -> DoneAndNotDoneFutures[_T]: ... +def wait( + fs: Iterable[Future[_T]], timeout: float | None = None, return_when: str = "ALL_COMPLETED" +) -> DoneAndNotDoneFutures[_T]: ... class _Waiter: event: threading.Event finished_futures: list[Future[Any]] - def __init__(self) -> None: ... def add_result(self, future: Future[Any]) -> None: ... def add_exception(self, future: Future[Any]) -> None: ... def add_cancelled(self, future: Future[Any]) -> None: ... class _AsCompletedWaiter(_Waiter): lock: threading.Lock - def __init__(self) -> None: ... - def add_result(self, future: Future[Any]) -> None: ... - def add_exception(self, future: Future[Any]) -> None: ... - def add_cancelled(self, future: Future[Any]) -> None: ... -class _FirstCompletedWaiter(_Waiter): - def add_result(self, future: Future[Any]) -> None: ... - def add_exception(self, future: Future[Any]) -> None: ... - def add_cancelled(self, future: Future[Any]) -> None: ... +class _FirstCompletedWaiter(_Waiter): ... class _AllCompletedWaiter(_Waiter): num_pending_calls: int stop_on_exception: bool lock: threading.Lock def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ... - def add_result(self, future: Future[Any]) -> None: ... - def add_exception(self, future: Future[Any]) -> None: ... - def add_cancelled(self, future: Future[Any]) -> None: ... class _AcquireFutures: futures: Iterable[Future[Any]] def __init__(self, futures: Iterable[Future[Any]]) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/process.pyi b/mypy/typeshed/stdlib/concurrent/futures/process.pyi index 211107cf357d..85af2e7f84c7 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/process.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/process.pyi @@ -19,7 +19,6 @@ class _ThreadWakeup: _closed: bool _reader: Connection _writer: Connection - def __init__(self) -> None: ... def close(self) -> None: ... def wakeup(self) -> None: ... def clear(self) -> None: ... @@ -56,10 +55,10 @@ class _ResultItem: if sys.version_info >= (3, 11): exit_pid: int | None def __init__( - self, work_id: int, exception: Exception | None = ..., result: Any | None = ..., exit_pid: int | None = ... + self, work_id: int, exception: Exception | None = None, result: Any | None = None, exit_pid: int | None = None ) -> None: ... else: - def __init__(self, work_id: int, exception: Exception | None = ..., result: Any | None = ...) -> None: ... + def __init__(self, work_id: int, exception: Exception | None = None, result: Any | None = None) -> None: ... class _CallItem: work_id: int @@ -75,7 +74,7 @@ class _SafeQueue(Queue[Future[Any]]): if sys.version_info >= (3, 9): def __init__( self, - max_size: int | None = ..., + max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]], @@ -84,7 +83,7 @@ class _SafeQueue(Queue[Future[Any]]): ) -> None: ... 
else: def __init__( - self, max_size: int | None = ..., *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] + self, max_size: int | None = 0, *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] ) -> None: ... def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... @@ -96,14 +95,14 @@ if sys.version_info >= (3, 11): def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, - result: Any | None = ..., - exception: Exception | None = ..., - exit_pid: int | None = ..., + result: Any | None = None, + exception: Exception | None = None, + exit_pid: int | None = None, ) -> None: ... else: def _sendback_result( - result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = ..., exception: Exception | None = ... + result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = None, exception: Exception | None = None ) -> None: ... if sys.version_info >= (3, 11): @@ -112,7 +111,7 @@ if sys.version_info >= (3, 11): result_queue: SimpleQueue[_ResultItem], initializer: Callable[..., object] | None, initargs: tuple[Any, ...], - max_tasks: int | None = ..., + max_tasks: int | None = None, ) -> None: ... else: @@ -172,19 +171,19 @@ class ProcessPoolExecutor(Executor): if sys.version_info >= (3, 11): def __init__( self, - max_workers: int | None = ..., - mp_context: BaseContext | None = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., *, - max_tasks_per_child: int | None = ..., + max_tasks_per_child: int | None = None, ) -> None: ... else: def __init__( self, - max_workers: int | None = ..., - mp_context: BaseContext | None = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + mp_context: BaseContext | None = None, + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., ) -> None: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi index 387ce0d7e438..e43dd3dfa33a 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi @@ -50,9 +50,9 @@ class ThreadPoolExecutor(Executor): _work_queue: queue.SimpleQueue[_WorkItem[Any]] def __init__( self, - max_workers: int | None = ..., - thread_name_prefix: str = ..., - initializer: Callable[..., object] | None = ..., + max_workers: int | None = None, + thread_name_prefix: str = "", + initializer: Callable[..., object] | None = None, initargs: tuple[Any, ...] = ..., ) -> None: ... def _adjust_thread_count(self) -> None: ... 
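# Illustrative sketch, not part of the patch: the concurrent.futures hunks above
# swap the old `self: Self` TypeVar idiom from `_typeshed` for
# `typing_extensions.Self` and write out the real runtime defaults
# (e.g. Executor.shutdown(wait=True, cancel_futures=False)). The hypothetical
# helpers below (`crunch`, `run_jobs`) only use stdlib APIs and show why
# `__enter__(self) -> Self` matters to callers.
from __future__ import annotations

from concurrent.futures import ThreadPoolExecutor, as_completed


def crunch(n: int) -> int:
    return n * n


def run_jobs() -> list[int]:
    # Because Executor.__enter__ is annotated as returning Self, `pool` is
    # inferred as ThreadPoolExecutor here rather than the plain Executor base.
    with ThreadPoolExecutor(max_workers=4) as pool:
        futures = [pool.submit(crunch, n) for n in range(8)]
        return [f.result() for f in as_completed(futures)]


if __name__ == "__main__":
    print(sorted(run_jobs()))
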
diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi index 00a23588b602..2c5b68385767 100644 --- a/mypy/typeshed/stdlib/configparser.pyi +++ b/mypy/typeshed/stdlib/configparser.pyi @@ -65,32 +65,48 @@ class RawConfigParser(_Parser): @overload def __init__( self, - defaults: Mapping[str, str | None] | None = ..., + defaults: Mapping[str, str | None] | None = None, dict_type: type[Mapping[str, str]] = ..., - allow_no_value: Literal[True] = ..., *, + allow_no_value: Literal[True], delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., - inline_comment_prefixes: Sequence[str] | None = ..., - strict: bool = ..., - empty_lines_in_values: bool = ..., - default_section: str = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", interpolation: Interpolation | None = ..., converters: _ConvertersMap = ..., ) -> None: ... @overload def __init__( self, - defaults: _Section | None = ..., + defaults: Mapping[str, str | None] | None, + dict_type: type[Mapping[str, str]], + allow_no_value: Literal[True], + *, + delimiters: Sequence[str] = ..., + comment_prefixes: Sequence[str] = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", + interpolation: Interpolation | None = ..., + converters: _ConvertersMap = ..., + ) -> None: ... + @overload + def __init__( + self, + defaults: _Section | None = None, dict_type: type[Mapping[str, str]] = ..., - allow_no_value: bool = ..., + allow_no_value: bool = False, *, delimiters: Sequence[str] = ..., comment_prefixes: Sequence[str] = ..., - inline_comment_prefixes: Sequence[str] | None = ..., - strict: bool = ..., - empty_lines_in_values: bool = ..., - default_section: str = ..., + inline_comment_prefixes: Sequence[str] | None = None, + strict: bool = True, + empty_lines_in_values: bool = True, + default_section: str = "DEFAULT", interpolation: Interpolation | None = ..., converters: _ConvertersMap = ..., ) -> None: ... @@ -106,30 +122,30 @@ class RawConfigParser(_Parser): def has_section(self, section: str) -> bool: ... def options(self, section: str) -> list[str]: ... def has_option(self, section: str, option: str) -> bool: ... - def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = ...) -> list[str]: ... - def read_file(self, f: Iterable[str], source: str | None = ...) -> None: ... - def read_string(self, string: str, source: str = ...) -> None: ... - def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = ...) -> None: ... - def readfp(self, fp: Iterable[str], filename: str | None = ...) -> None: ... + def read(self, filenames: StrOrBytesPath | Iterable[StrOrBytesPath], encoding: str | None = None) -> list[str]: ... + def read_file(self, f: Iterable[str], source: str | None = None) -> None: ... + def read_string(self, string: str, source: str = "") -> None: ... + def read_dict(self, dictionary: Mapping[str, Mapping[str, Any]], source: str = "") -> None: ... + def readfp(self, fp: Iterable[str], filename: str | None = None) -> None: ... # These get* methods are partially applied (with the same names) in # SectionProxy; the stubs should be kept updated together @overload - def getint(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> int: ... 
+ def getint(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> int: ... @overload def getint( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> int | _T: ... @overload - def getfloat(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> float: ... + def getfloat(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> float: ... @overload def getfloat( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> float | _T: ... @overload - def getboolean(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> bool: ... + def getboolean(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> bool: ... @overload def getboolean( - self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T = ... + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T = ... ) -> bool | _T: ... def _get_conv( self, @@ -137,21 +153,23 @@ class RawConfigParser(_Parser): option: str, conv: Callable[[str], _T], *, - raw: bool = ..., - vars: _Section | None = ..., + raw: bool = False, + vars: _Section | None = None, fallback: _T = ..., ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> str | Any: ... + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str | Any: ... @overload - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T) -> str | _T | Any: ... + def get( + self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T + ) -> str | _T | Any: ... @overload - def items(self, *, raw: bool = ..., vars: _Section | None = ...) -> ItemsView[str, SectionProxy]: ... + def items(self, *, raw: bool = False, vars: _Section | None = None) -> ItemsView[str, SectionProxy]: ... @overload - def items(self, section: str, raw: bool = ..., vars: _Section | None = ...) -> list[tuple[str, str]]: ... - def set(self, section: str, option: str, value: str | None = ...) -> None: ... - def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = ...) -> None: ... + def items(self, section: str, raw: bool = False, vars: _Section | None = None) -> list[tuple[str, str]]: ... + def set(self, section: str, option: str, value: str | None = None) -> None: ... + def write(self, fp: SupportsWrite[str], space_around_delimiters: bool = True) -> None: ... def remove_option(self, section: str, option: str) -> bool: ... def remove_section(self, section: str) -> bool: ... def optionxform(self, optionstr: str) -> str: ... @@ -159,9 +177,9 @@ class RawConfigParser(_Parser): class ConfigParser(RawConfigParser): # This is incompatible with MutableMapping so we ignore the type @overload # type: ignore[override] - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ...) -> str: ... 
+ def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None) -> str: ... @overload - def get(self, section: str, option: str, *, raw: bool = ..., vars: _Section | None = ..., fallback: _T) -> str | _T: ... + def get(self, section: str, option: str, *, raw: bool = False, vars: _Section | None = None, fallback: _T) -> str | _T: ... if sys.version_info < (3, 12): class SafeConfigParser(ConfigParser): ... # deprecated alias @@ -181,11 +199,11 @@ class SectionProxy(MutableMapping[str, str]): def get( # type: ignore[override] self, option: str, - fallback: str | None = ..., + fallback: str | None = None, *, - raw: bool = ..., - vars: _Section | None = ..., - _impl: Any | None = ..., + raw: bool = False, + vars: _Section | None = None, + _impl: Any | None = None, **kwargs: Any, ) -> str | Any: ... # can be None in RawConfigParser's sections # These are partially-applied version of the methods with the same names in @@ -216,7 +234,7 @@ class ConverterMapping(MutableMapping[str, _ConverterCallback | None]): class Error(Exception): message: str - def __init__(self, msg: str = ...) -> None: ... + def __init__(self, msg: str = "") -> None: ... class NoSectionError(Error): section: str @@ -226,14 +244,14 @@ class DuplicateSectionError(Error): section: str source: str | None lineno: int | None - def __init__(self, section: str, source: str | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, section: str, source: str | None = None, lineno: int | None = None) -> None: ... class DuplicateOptionError(Error): section: str option: str source: str | None lineno: int | None - def __init__(self, section: str, option: str, source: str | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, section: str, option: str, source: str | None = None, lineno: int | None = None) -> None: ... class NoOptionError(Error): section: str @@ -257,7 +275,7 @@ class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str errors: list[tuple[int, str]] - def __init__(self, source: str | None = ..., filename: str | None = ...) -> None: ... + def __init__(self, source: str | None = None, filename: str | None = None) -> None: ... def append(self, lineno: int, line: str) -> None: ... class MissingSectionHeaderError(ParsingError): diff --git a/mypy/typeshed/stdlib/contextlib.pyi b/mypy/typeshed/stdlib/contextlib.pyi index 6a846ad618c3..feb43aabb039 100644 --- a/mypy/typeshed/stdlib/contextlib.pyi +++ b/mypy/typeshed/stdlib/contextlib.pyi @@ -1,11 +1,11 @@ import abc import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Unused from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ "contextmanager", @@ -108,7 +108,7 @@ _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) class closing(AbstractContextManager[_SupportsCloseT]): def __init__(self, thing: _SupportsCloseT) -> None: ... - def __exit__(self, *exc_info: object) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... 
if sys.version_info >= (3, 10): class _SupportsAclose(Protocol): @@ -117,7 +117,7 @@ if sys.version_info >= (3, 10): class aclosing(AbstractAsyncContextManager[_SupportsAcloseT]): def __init__(self, thing: _SupportsAcloseT) -> None: ... - async def __aexit__(self, *exc_info: object) -> None: ... + async def __aexit__(self, *exc_info: Unused) -> None: ... class suppress(AbstractContextManager[None]): def __init__(self, *exceptions: type[BaseException]) -> None: ... @@ -137,24 +137,24 @@ class redirect_stderr(_RedirectStream[_T_io]): ... # In reality this is a subclass of `AbstractContextManager`; # see #7961 for why we don't do that in the stub class ExitStack(metaclass=abc.ABCMeta): - def __init__(self) -> None: ... def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... - def pop_all(self: Self) -> Self: ... + def pop_all(self) -> Self: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None ) -> bool: ... -_ExitCoroFunc: TypeAlias = Callable[[type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool]] +_ExitCoroFunc: TypeAlias = Callable[ + [type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool | None] +] _ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any] | _ExitCoroFunc) # In reality this is a subclass of `AbstractAsyncContextManager`; # see #7961 for why we don't do that in the stub class AsyncExitStack(metaclass=abc.ABCMeta): - def __init__(self) -> None: ... def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... async def enter_async_context(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... @@ -163,9 +163,9 @@ class AsyncExitStack(metaclass=abc.ABCMeta): def push_async_callback( self, __callback: Callable[_P, Awaitable[_T]], *args: _P.args, **kwds: _P.kwargs ) -> Callable[_P, Awaitable[_T]]: ... - def pop_all(self: Self) -> Self: ... + def pop_all(self) -> Self: ... async def aclose(self) -> None: ... - async def __aenter__(self: Self) -> Self: ... + async def __aenter__(self) -> Self: ... async def __aexit__( self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None ) -> bool: ... @@ -174,29 +174,29 @@ if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T], AbstractAsyncContextManager[_T]): enter_result: _T @overload - def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... @overload def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... def __enter__(self) -> _T: ... - def __exit__(self, *exctype: object) -> None: ... + def __exit__(self, *exctype: Unused) -> None: ... async def __aenter__(self) -> _T: ... - async def __aexit__(self, *exctype: object) -> None: ... + async def __aexit__(self, *exctype: Unused) -> None: ... else: class nullcontext(AbstractContextManager[_T]): enter_result: _T @overload - def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... 
@overload def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... def __enter__(self) -> _T: ... - def __exit__(self, *exctype: object) -> None: ... + def __exit__(self, *exctype: Unused) -> None: ... if sys.version_info >= (3, 11): - _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=int | StrOrBytesPath) + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): path: _T_fd_or_any_path def __init__(self, path: _T_fd_or_any_path) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *excinfo: object) -> None: ... + def __exit__(self, *excinfo: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/copy.pyi b/mypy/typeshed/stdlib/copy.pyi index b53f418b3930..f68965d3dc91 100644 --- a/mypy/typeshed/stdlib/copy.pyi +++ b/mypy/typeshed/stdlib/copy.pyi @@ -8,7 +8,7 @@ _T = TypeVar("_T") PyStringMap: Any # Note: memo and _nil are internal kwargs. -def deepcopy(x: _T, memo: dict[int, Any] | None = ..., _nil: Any = ...) -> _T: ... +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = ...) -> _T: ... def copy(x: _T) -> _T: ... class Error(Exception): ... diff --git a/mypy/typeshed/stdlib/copyreg.pyi b/mypy/typeshed/stdlib/copyreg.pyi index 4403550b587e..8f7fd957fc52 100644 --- a/mypy/typeshed/stdlib/copyreg.pyi +++ b/mypy/typeshed/stdlib/copyreg.pyi @@ -1,16 +1,16 @@ from collections.abc import Callable, Hashable -from typing import Any, SupportsInt, TypeVar, Union +from typing import Any, SupportsInt, TypeVar from typing_extensions import TypeAlias _T = TypeVar("_T") -_Reduce: TypeAlias = Union[tuple[Callable[..., _T], tuple[Any, ...]], tuple[Callable[..., _T], tuple[Any, ...], Any | None]] +_Reduce: TypeAlias = tuple[Callable[..., _T], tuple[Any, ...]] | tuple[Callable[..., _T], tuple[Any, ...], Any | None] __all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_extension_cache"] def pickle( ob_type: type[_T], pickle_function: Callable[[_T], str | _Reduce[_T]], - constructor_ob: Callable[[_Reduce[_T]], _T] | None = ..., + constructor_ob: Callable[[_Reduce[_T]], _T] | None = None, ) -> None: ... def constructor(object: Callable[[_Reduce[_T]], _T]) -> None: ... def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: ... diff --git a/mypy/typeshed/stdlib/crypt.pyi b/mypy/typeshed/stdlib/crypt.pyi index 83ad45d5c155..1ad0a384eae7 100644 --- a/mypy/typeshed/stdlib/crypt.pyi +++ b/mypy/typeshed/stdlib/crypt.pyi @@ -8,5 +8,5 @@ if sys.platform != "win32": METHOD_SHA512: _Method METHOD_BLOWFISH: _Method methods: list[_Method] - def mksalt(method: _Method | None = ..., *, rounds: int | None = ...) -> str: ... - def crypt(word: str, salt: str | _Method | None = ...) -> str: ... + def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ... + def crypt(word: str, salt: str | _Method | None = None) -> str: ... 
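# Illustrative sketch, not part of the patch: the contextlib.pyi hunks above
# annotate ExitStack.__enter__ and pop_all() as returning Self and make
# nullcontext's enter_result default explicit. The helpers below
# (`open_or_default`, `open_many`) are made up; everything else is stdlib.
from __future__ import annotations

from contextlib import AbstractContextManager, ExitStack, nullcontext
from typing import IO


def open_or_default(path: str | None) -> AbstractContextManager[IO[str] | None]:
    # nullcontext(None) enters with None, so callers can always use `with`.
    return open(path, encoding="utf-8") if path is not None else nullcontext(None)


def open_many(paths: list[str]) -> ExitStack:
    stack = ExitStack()
    for p in paths:
        stack.enter_context(open(p, encoding="utf-8"))
    # pop_all() returns a fresh ExitStack (hence the Self return type) that
    # takes over the cleanup callbacks, keeping the files open for the caller.
    return stack.pop_all()
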
diff --git a/mypy/typeshed/stdlib/csv.pyi b/mypy/typeshed/stdlib/csv.pyi index e9552c759c16..234b189fb3db 100644 --- a/mypy/typeshed/stdlib/csv.pyi +++ b/mypy/typeshed/stdlib/csv.pyi @@ -21,10 +21,10 @@ from _csv import ( unregister_dialect as unregister_dialect, writer as writer, ) -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 8): from builtins import dict as _DictReadMapping @@ -60,24 +60,9 @@ __all__ = [ _T = TypeVar("_T") -class excel(Dialect): - delimiter: str - quotechar: str - doublequote: bool - skipinitialspace: bool - lineterminator: str - quoting: _QuotingType - -class excel_tab(excel): - delimiter: str - -class unix_dialect(Dialect): - delimiter: str - quotechar: str - doublequote: bool - skipinitialspace: bool - lineterminator: str - quoting: _QuotingType +class excel(Dialect): ... +class excel_tab(excel): ... +class unix_dialect(Dialect): ... class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): fieldnames: Sequence[_T] | None @@ -91,9 +76,9 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): self, f: Iterable[str], fieldnames: Sequence[_T], - restkey: str | None = ..., - restval: str | None = ..., - dialect: _DialectLike = ..., + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -108,10 +93,10 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): def __init__( self: DictReader[str], f: Iterable[str], - fieldnames: Sequence[str] | None = ..., - restkey: str | None = ..., - restval: str | None = ..., - dialect: _DialectLike = ..., + fieldnames: Sequence[str] | None = None, + restkey: str | None = None, + restval: str | None = None, + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -122,7 +107,7 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T | Any, str | Any]]): quoting: _QuotingType = ..., strict: bool = ..., ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _DictReadMapping[_T | Any, str | Any]: ... if sys.version_info >= (3, 12): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -136,9 +121,9 @@ class DictWriter(Generic[_T]): self, f: SupportsWrite[str], fieldnames: Collection[_T], - restval: Any | None = ..., - extrasaction: Literal["raise", "ignore"] = ..., - dialect: _DialectLike = ..., + restval: Any | None = "", + extrasaction: Literal["raise", "ignore"] = "raise", + dialect: _DialectLike = "excel", *, delimiter: str = ..., quotechar: str | None = ..., @@ -161,6 +146,5 @@ class DictWriter(Generic[_T]): class Sniffer: preferred: list[str] - def __init__(self) -> None: ... - def sniff(self, sample: str, delimiters: str | None = ...) -> type[Dialect]: ... + def sniff(self, sample: str, delimiters: str | None = None) -> type[Dialect]: ... def has_header(self, sample: str) -> bool: ... 
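# Illustrative sketch, not part of the patch: the csv.pyi hunks above collapse
# excel/excel_tab/unix_dialect into empty subclasses (the attributes are
# declared on the Dialect base) and spell out defaults such as
# DictReader(dialect="excel") and Sniffer.sniff(delimiters=None).
# SAMPLE and read_scores below are made up; the calls are documented csv APIs.
from __future__ import annotations

import csv
import io

SAMPLE = "name;score\nada;10\nlin;7\n"


def read_scores(text: str) -> list[dict[str, str]]:
    # Sniffer.sniff returns a Dialect subclass, which DictReader accepts in
    # place of the default "excel" dialect name.
    dialect = csv.Sniffer().sniff(text, delimiters=";,")
    reader = csv.DictReader(io.StringIO(text), dialect=dialect)
    return list(reader)


if __name__ == "__main__":
    print(read_scores(SAMPLE))
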
diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 48694fc6cf8a..497e2f7db70b 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import ReadableBuffer, Self, WriteableBuffer +from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL +from _typeshed import ReadableBuffer, WriteableBuffer from abc import abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence -from typing import Any, ClassVar, Generic, TypeVar, Union as _UnionT, overload -from typing_extensions import TypeAlias +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -12,8 +13,6 @@ _T = TypeVar("_T") _DLLT = TypeVar("_DLLT", bound=CDLL) _CT = TypeVar("_CT", bound=_CData) -RTLD_GLOBAL: int -RTLD_LOCAL: int DEFAULT_MODE: int class CDLL: @@ -27,14 +26,19 @@ class CDLL: self, name: str | None, mode: int = ..., - handle: int | None = ..., - use_errno: bool = ..., - use_last_error: bool = ..., - winmode: int | None = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, + winmode: int | None = None, ) -> None: ... else: def __init__( - self, name: str | None, mode: int = ..., handle: int | None = ..., use_errno: bool = ..., use_last_error: bool = ... + self, + name: str | None, + mode: int = ..., + handle: int | None = None, + use_errno: bool = False, + use_last_error: bool = False, ) -> None: ... def __getattr__(self, name: str) -> _NamedFuncPointer: ... @@ -65,29 +69,29 @@ class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] - def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] class _CData(metaclass=_CDataMeta): - _b_base: int + _b_base_: int _b_needsfree_: bool _objects: Mapping[Any, int] | None @classmethod - def from_buffer(cls: type[Self], source: WriteableBuffer, offset: int = ...) -> Self: ... + def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_buffer_copy(cls: type[Self], source: ReadableBuffer, offset: int = ...) -> Self: ... + def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_address(cls: type[Self], address: int) -> Self: ... + def from_address(cls, address: int) -> Self: ... @classmethod - def from_param(cls: type[Self], obj: Any) -> Self | _CArgObject: ... + def from_param(cls, obj: Any) -> Self | _CArgObject: ... @classmethod - def in_dll(cls: type[Self], library: CDLL, name: str) -> Self: ... + def in_dll(cls, library: CDLL, name: str) -> Self: ... class _CanCastTo(_CData): ... class _PointerLike(_CanCastTo): ... 
_ECT: TypeAlias = Callable[[type[_CData] | None, _FuncPointer, tuple[_CData, ...]], _CData] -_PF: TypeAlias = _UnionT[tuple[int], tuple[int, str], tuple[int, str, Any]] +_PF: TypeAlias = tuple[int] | tuple[int, str] | tuple[int, str, Any] class _FuncPointer(_PointerLike, _CData): restype: type[_CData] | Callable[[int], Any] | None @@ -137,11 +141,11 @@ def byref(obj: _CData, offset: int = ...) -> _CArgObject: ... _CastT = TypeVar("_CastT", bound=_CanCastTo) def cast(obj: _CData | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... -def create_string_buffer(init: int | bytes, size: int | None = ...) -> Array[c_char]: ... +def create_string_buffer(init: int | bytes, size: int | None = None) -> Array[c_char]: ... c_buffer = create_string_buffer -def create_unicode_buffer(init: int | str, size: int | None = ...) -> Array[c_wchar]: ... +def create_unicode_buffer(init: int | str, size: int | None = None) -> Array[c_wchar]: ... if sys.platform == "win32": def DllCanUnloadNow() -> int: ... @@ -166,13 +170,10 @@ class _Pointer(Generic[_CT], _PointerLike, _CData): @overload def __init__(self, arg: _CT) -> None: ... @overload - def __getitem__(self, __i: int) -> _CT: ... - @overload - def __getitem__(self, __s: slice) -> list[_CT]: ... - @overload - def __setitem__(self, __i: int, __o: _CT) -> None: ... + def __getitem__(self, __i: int) -> Any: ... @overload - def __setitem__(self, __s: slice, __o: Iterable[_CT]) -> None: ... + def __getitem__(self, __s: slice) -> list[Any]: ... + def __setitem__(self, __i: int, __o: Any) -> None: ... def pointer(__arg: _CT) -> _Pointer[_CT]: ... def resize(obj: _CData, size: int) -> None: ... @@ -182,12 +183,12 @@ if sys.platform == "win32": def set_last_error(value: int) -> int: ... def sizeof(obj_or_type: _CData | type[_CData]) -> int: ... -def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ... +def string_at(address: _CVoidConstPLike, size: int = -1) -> bytes: ... if sys.platform == "win32": - def WinError(code: int | None = ..., descr: str | None = ...) -> OSError: ... + def WinError(code: int | None = None, descr: str | None = None) -> OSError: ... -def wstring_at(address: _CVoidConstPLike, size: int = ...) -> str: ... +def wstring_at(address: _CVoidConstPLike, size: int = -1) -> str: ... class _SimpleCData(Generic[_T], _CData): value: _T @@ -198,7 +199,7 @@ class _SimpleCData(Generic[_T], _CData): class c_byte(_SimpleCData[int]): ... class c_char(_SimpleCData[bytes]): - def __init__(self, value: int | bytes = ...) -> None: ... + def __init__(self, value: int | bytes | bytearray = ...) -> None: ... class c_char_p(_PointerLike, _SimpleCData[bytes | None]): def __init__(self, value: int | bytes | None = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/curses/textpad.pyi b/mypy/typeshed/stdlib/curses/textpad.pyi index ad9983431fc7..4d28b4dfbcdc 100644 --- a/mypy/typeshed/stdlib/curses/textpad.pyi +++ b/mypy/typeshed/stdlib/curses/textpad.pyi @@ -7,7 +7,7 @@ if sys.platform != "win32": class Textbox: stripspaces: bool - def __init__(self, win: _CursesWindow, insert_mode: bool = ...) -> None: ... - def edit(self, validate: Callable[[int], int] | None = ...) -> str: ... + def __init__(self, win: _CursesWindow, insert_mode: bool = False) -> None: ... + def edit(self, validate: Callable[[int], int] | None = None) -> str: ... def do_command(self, ch: str | int) -> None: ... def gather(self) -> str: ... 
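# Illustrative sketch, not part of the patch: the ctypes/__init__.pyi hunks
# above replace `...` placeholders with the real defaults
# (create_string_buffer(size=None), string_at(size=-1), wstring_at(size=-1))
# and loosen _Pointer.__getitem__ to return Any. The asserts below show what
# those defaults mean at runtime, using only documented ctypes calls.
import ctypes

# With size omitted (now spelled `size: int | None = None` in the stub), the
# buffer length is taken from the initializer plus a trailing NUL byte.
buf = ctypes.create_string_buffer(b"hello")
assert len(buf) == 6

# string_at/wstring_at default to size=-1, i.e. read up to the first NUL.
assert ctypes.string_at(ctypes.addressof(buf)) == b"hello"

wbuf = ctypes.create_unicode_buffer("hi")
assert ctypes.wstring_at(ctypes.addressof(wbuf)) == "hi"
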
diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index 04ae771fc064..c02aaabe6196 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -1,10 +1,11 @@ import enum import sys import types +from _typeshed import DataclassInstance from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Mapping from typing import Any, Generic, Protocol, TypeVar, overload -from typing_extensions import Literal +from typing_extensions import Literal, TypeAlias, TypeGuard if sys.version_info >= (3, 9): from types import GenericAlias @@ -30,6 +31,8 @@ __all__ = [ if sys.version_info >= (3, 10): __all__ += ["KW_ONLY"] +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) + # define _MISSING_TYPE as an enum within the type stubs, # even though that is not really its type at runtime # this allows us to use Literal[_MISSING_TYPE.MISSING] @@ -44,62 +47,68 @@ if sys.version_info >= (3, 10): class KW_ONLY: ... @overload -def asdict(obj: Any) -> dict[str, Any]: ... +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... @overload -def asdict(obj: Any, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: Any) -> tuple[Any, ...]: ... +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... @overload -def astuple(obj: Any, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... +def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... if sys.version_info >= (3, 8): # cls argument is now positional-only @overload - def dataclass(__cls: type[_T]) -> type[_T]: ... - @overload def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(__cls: type[_T]) -> type[_T]: ... else: - @overload - def dataclass(_cls: type[_T]) -> type[_T]: ... @overload def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(_cls: type[_T]) -> type[_T]: ... if sys.version_info >= (3, 11): @overload def dataclass( *, - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., - weakref_slot: bool = ..., + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... elif sys.version_info >= (3, 10): @overload def dataclass( *, - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... else: @overload def dataclass( - *, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ... 
+ *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, ) -> Callable[[type[_T]], type[_T]]: ... # See https://github.com/python/mypy/issues/10750 @@ -152,32 +161,32 @@ if sys.version_info >= (3, 10): def field( *, default: _T, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> _T: ... @overload def field( *, default_factory: Callable[[], _T], - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> _T: ... @overload def field( *, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, kw_only: bool = ..., ) -> Any: ... @@ -186,38 +195,50 @@ else: def field( *, default: _T, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> _T: ... @overload def field( *, default_factory: Callable[[], _T], - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> _T: ... @overload def field( *, - init: bool = ..., - repr: bool = ..., - hash: bool | None = ..., - compare: bool = ..., - metadata: Mapping[Any, Any] | None = ..., + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, ) -> Any: ... -def fields(class_or_instance: Any) -> tuple[Field[Any], ...]: ... -def is_dataclass(obj: Any) -> bool: ... +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... +@overload +def is_dataclass(obj: DataclassInstance | type[DataclassInstance]) -> Literal[True]: ... +@overload +def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... class FrozenInstanceError(AttributeError): ... -class InitVar(Generic[_T]): +if sys.version_info >= (3, 9): + _InitVarMeta: TypeAlias = type +else: + class _InitVarMeta(type): + # Not used, instead `InitVar.__class_getitem__` is called. + def __getitem__(self, params: Any) -> InitVar[Any]: ... + +class InitVar(Generic[_T], metaclass=_InitVarMeta): type: Type[_T] def __init__(self, type: Type[_T]) -> None: ... if sys.version_info >= (3, 9): @@ -232,17 +253,17 @@ if sys.version_info >= (3, 11): fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] 
= ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., - weakref_slot: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, ) -> type: ... elif sys.version_info >= (3, 10): @@ -251,16 +272,16 @@ elif sys.version_info >= (3, 10): fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] = ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., - match_args: bool = ..., - kw_only: bool = ..., - slots: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, ) -> type: ... else: @@ -269,13 +290,13 @@ else: fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, bases: tuple[type, ...] = ..., - namespace: dict[str, Any] | None = ..., - init: bool = ..., - repr: bool = ..., - eq: bool = ..., - order: bool = ..., - unsafe_hash: bool = ..., - frozen: bool = ..., + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, ) -> type: ... -def replace(__obj: _T, **changes: Any) -> _T: ... +def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ... diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index 780ee941baa5..4da5501ce76d 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -1,8 +1,8 @@ import sys -from _typeshed import Self +from abc import abstractmethod from time import struct_time -from typing import ClassVar, NamedTuple, NoReturn, SupportsAbs, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias, final if sys.version_info >= (3, 11): __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") @@ -15,8 +15,11 @@ MINYEAR: Literal[1] MAXYEAR: Literal[9999] class tzinfo: + @abstractmethod def tzname(self, __dt: datetime | None) -> str | None: ... + @abstractmethod def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + @abstractmethod def dst(self, __dt: datetime | None) -> timedelta | None: ... def fromutc(self, __dt: datetime) -> datetime: ... @@ -29,6 +32,9 @@ class timezone(tzinfo): min: ClassVar[timezone] max: ClassVar[timezone] def __init__(self, offset: timedelta, name: str = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str: ... + def utcoffset(self, __dt: datetime | None) -> timedelta: ... + def dst(self, __dt: datetime | None) -> None: ... if sys.version_info >= (3, 11): UTC: timezone @@ -43,18 +49,18 @@ class date: min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] - def __new__(cls: type[Self], year: int, month: int, day: int) -> Self: ... 
+ def __new__(cls, year: int, month: int, day: int) -> Self: ... @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float) -> Self: ... + def fromtimestamp(cls, __timestamp: float) -> Self: ... @classmethod - def today(cls: type[Self]) -> Self: ... + def today(cls) -> Self: ... @classmethod - def fromordinal(cls: type[Self], __n: int) -> Self: ... + def fromordinal(cls, __n: int) -> Self: ... @classmethod - def fromisoformat(cls: type[Self], __date_string: str) -> Self: ... + def fromisoformat(cls, __date_string: str) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def fromisocalendar(cls: type[Self], year: int, week: int, day: int) -> Self: ... + def fromisocalendar(cls, year: int, week: int, day: int) -> Self: ... @property def year(self) -> int: ... @@ -63,21 +69,28 @@ class date: @property def day(self) -> int: ... def ctime(self) -> str: ... - def strftime(self, __format: str) -> str: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... - def replace(self: Self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... + def replace(self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... def __le__(self, __other: date) -> bool: ... def __lt__(self, __other: date) -> bool: ... def __ge__(self, __other: date) -> bool: ... def __gt__(self, __other: date) -> bool: ... if sys.version_info >= (3, 8): - def __add__(self: Self, __other: timedelta) -> Self: ... - def __radd__(self: Self, __other: timedelta) -> Self: ... + def __add__(self, __other: timedelta) -> Self: ... + def __radd__(self, __other: timedelta) -> Self: ... @overload - def __sub__(self: Self, __other: timedelta) -> Self: ... + def __sub__(self, __other: timedelta) -> Self: ... @overload def __sub__(self, __other: datetime) -> NoReturn: ... @overload @@ -105,7 +118,7 @@ class time: max: ClassVar[time] resolution: ClassVar[timedelta] def __new__( - cls: type[Self], + cls, hour: int = ..., minute: int = ..., second: int = ..., @@ -132,14 +145,21 @@ class time: def __gt__(self, __other: time) -> bool: ... def isoformat(self, timespec: str = ...) -> str: ... @classmethod - def fromisoformat(cls: type[Self], __time_string: str) -> Self: ... - def strftime(self, __format: str) -> str: ... + def fromisoformat(cls, __time_string: str) -> Self: ... + # On <3.12, the name of the parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + def strftime(self, format: str) -> str: ... + else: + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... 
def replace( - self: Self, + self, hour: int = ..., minute: int = ..., second: int = ..., @@ -152,12 +172,12 @@ class time: _Date: TypeAlias = date _Time: TypeAlias = time -class timedelta(SupportsAbs[timedelta]): +class timedelta: min: ClassVar[timedelta] max: ClassVar[timedelta] resolution: ClassVar[timedelta] def __new__( - cls: type[Self], + cls, days: float = ..., seconds: float = ..., microseconds: float = ..., @@ -201,9 +221,8 @@ class timedelta(SupportsAbs[timedelta]): class datetime(date): min: ClassVar[datetime] max: ClassVar[datetime] - resolution: ClassVar[timedelta] def __new__( - cls: type[Self], + cls, year: int, month: int, day: int, @@ -227,37 +246,40 @@ class datetime(date): def tzinfo(self) -> _TzInfo | None: ... @property def fold(self) -> int: ... - # The first parameter in `fromtimestamp` is actually positional-or-keyword, - # but it is named "timestamp" in the C implementation and "t" in the Python implementation, - # so it is only truly *safe* to pass it as a positional argument. - @classmethod - def fromtimestamp(cls: type[Self], __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + # On <3.12, the name of the first parameter in the pure-Python implementation + # didn't match the name in the C implementation, + # meaning it is only *safe* to pass it as a keyword argument on 3.12+ + if sys.version_info >= (3, 12): + @classmethod + def fromtimestamp(cls, timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + else: + @classmethod + def fromtimestamp(cls, __timestamp: float, tz: _TzInfo | None = ...) -> Self: ... + @classmethod - def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... + def utcfromtimestamp(cls, __t: float) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def now(cls: type[Self], tz: _TzInfo | None = ...) -> Self: ... + def now(cls, tz: _TzInfo | None = None) -> Self: ... else: @overload @classmethod - def now(cls: type[Self], tz: None = ...) -> Self: ... + def now(cls, tz: None = None) -> Self: ... @overload @classmethod def now(cls, tz: _TzInfo) -> datetime: ... @classmethod - def utcnow(cls: type[Self]) -> Self: ... + def utcnow(cls) -> Self: ... @classmethod - def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> datetime: ... - @classmethod - def fromisoformat(cls: type[Self], __date_string: str) -> Self: ... + def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ... def timestamp(self) -> float: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _Date: ... def time(self) -> _Time: ... def timetz(self) -> _Time: ... def replace( - self: Self, + self, year: int = ..., month: int = ..., day: int = ..., @@ -270,14 +292,13 @@ class datetime(date): fold: int = ..., ) -> Self: ... if sys.version_info >= (3, 8): - def astimezone(self: Self, tz: _TzInfo | None = ...) -> Self: ... + def astimezone(self, tz: _TzInfo | None = ...) -> Self: ... else: def astimezone(self, tz: _TzInfo | None = ...) -> datetime: ... - def ctime(self) -> str: ... def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... @classmethod - def strptime(cls, __date_string: str, __format: str) -> datetime: ... + def strptime(cls, __date_string: str, __format: str) -> Self: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... @@ -287,7 +308,7 @@ class datetime(date): def __gt__(self, __other: datetime) -> bool: ... 
# type: ignore[override] if sys.version_info >= (3, 8): @overload # type: ignore[override] - def __sub__(self: Self, __other: timedelta) -> Self: ... + def __sub__(self, __other: timedelta) -> Self: ... @overload def __sub__(self: _D, __other: _D) -> timedelta: ... else: @@ -298,7 +319,3 @@ class datetime(date): def __sub__(self, __other: datetime) -> timedelta: ... @overload def __sub__(self, __other: timedelta) -> datetime: ... - if sys.version_info >= (3, 9): - def isocalendar(self) -> _IsoCalendarDate: ... - else: - def isocalendar(self) -> tuple[int, int, int]: ... diff --git a/mypy/typeshed/stdlib/dbm/__init__.pyi b/mypy/typeshed/stdlib/dbm/__init__.pyi index 9e99f0d5e74c..0068d67b6ad1 100644 --- a/mypy/typeshed/stdlib/dbm/__init__.pyi +++ b/mypy/typeshed/stdlib/dbm/__init__.pyi @@ -1,12 +1,11 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from types import TracebackType -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = ["open", "whichdb", "error"] _KeyType: TypeAlias = str | bytes -_ValueType: TypeAlias = str | bytes +_ValueType: TypeAlias = str | bytes | bytearray _TFlags: TypeAlias = Literal[ "r", "w", @@ -82,7 +81,7 @@ class _Database(MutableMapping[_KeyType, bytes]): def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -92,4 +91,4 @@ class _error(Exception): ... error: tuple[type[_error], type[OSError]] def whichdb(filename: str) -> str: ... -def open(file: str, flag: _TFlags = ..., mode: int = ...) -> _Database: ... +def open(file: str, flag: _TFlags = "r", mode: int = 0o666) -> _Database: ... diff --git a/mypy/typeshed/stdlib/dbm/dumb.pyi b/mypy/typeshed/stdlib/dbm/dumb.pyi index 4fd199f19728..1fc68cf71f9d 100644 --- a/mypy/typeshed/stdlib/dbm/dumb.pyi +++ b/mypy/typeshed/stdlib/dbm/dumb.pyi @@ -1,7 +1,6 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from types import TracebackType -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = ["error", "open"] @@ -10,8 +9,11 @@ _ValueType: TypeAlias = str | bytes error = OSError +# This class doesn't exist at runtime. open() can return an instance of +# any of the three implementations of dbm (dumb, gnu, ndbm), and this +# class is intended to represent the common interface supported by all three. class _Database(MutableMapping[_KeyType, bytes]): - def __init__(self, filebasename: str, mode: str, flag: str = ...) -> None: ... + def __init__(self, filebasename: str, mode: str, flag: str = "c") -> None: ... def sync(self) -> None: ... def iterkeys(self) -> Iterator[bytes]: ... # undocumented def close(self) -> None: ... @@ -21,9 +23,9 @@ class _Database(MutableMapping[_KeyType, bytes]): def __iter__(self) -> Iterator[bytes]: ... def __len__(self) -> int: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... -def open(file: str, flag: str = ..., mode: int = ...) -> _Database: ... +def open(file: str, flag: str = "c", mode: int = 0o666) -> _Database: ... 
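Looking back at the dataclasses.pyi hunk at the start of this section: `is_dataclass()` gains `TypeGuard` overloads, so a successful check narrows the argument to `DataclassInstance | type[DataclassInstance]` and a call like `fields()` then type-checks without a cast. A minimal sketch of the narrowing this enables:

```python
from dataclasses import dataclass, fields, is_dataclass

@dataclass
class Point:
    x: int
    y: int

def describe(obj: object) -> str:
    # With the new TypeGuard overload, obj is narrowed inside this branch,
    # so fields() accepts it without a cast.
    if is_dataclass(obj):
        return ", ".join(f.name for f in fields(obj))
    return "not a dataclass"

print(describe(Point(1, 2)))   # "x, y"
print(describe("hello"))       # "not a dataclass"
```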
diff --git a/mypy/typeshed/stdlib/dbm/gnu.pyi b/mypy/typeshed/stdlib/dbm/gnu.pyi index 561206c4e0be..3dc66a30c370 100644 --- a/mypy/typeshed/stdlib/dbm/gnu.pyi +++ b/mypy/typeshed/stdlib/dbm/gnu.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer from types import TracebackType from typing import TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer open_flags: str @@ -24,17 +24,17 @@ if sys.platform != "win32": def __delitem__(self, key: _KeyType) -> None: ... def __contains__(self, key: _KeyType) -> bool: ... def __len__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _gdbm: ... + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _gdbm: ... diff --git a/mypy/typeshed/stdlib/dbm/ndbm.pyi b/mypy/typeshed/stdlib/dbm/ndbm.pyi index f1032bf3cae7..1106fb2a8e7e 100644 --- a/mypy/typeshed/stdlib/dbm/ndbm.pyi +++ b/mypy/typeshed/stdlib/dbm/ndbm.pyi @@ -1,13 +1,13 @@ import sys -from _typeshed import Self +from _typeshed import ReadOnlyBuffer from types import TracebackType from typing import TypeVar, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias if sys.platform != "win32": _T = TypeVar("_T") - _KeyType: TypeAlias = str | bytes - _ValueType: TypeAlias = str | bytes + _KeyType: TypeAlias = str | ReadOnlyBuffer + _ValueType: TypeAlias = str | ReadOnlyBuffer class error(OSError): ... library: str @@ -20,17 +20,17 @@ if sys.platform != "win32": def __delitem__(self, key: _KeyType) -> None: ... def __len__(self) -> int: ... def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @overload def get(self, k: _KeyType) -> bytes | None: ... @overload - def get(self, k: _KeyType, default: bytes | _T) -> bytes | _T: ... + def get(self, k: _KeyType, default: _T) -> bytes | _T: ... def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime __new__: None # type: ignore[assignment] __init__: None # type: ignore[assignment] - def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _dbm: ... + def open(__filename: str, __flags: str = "r", __mode: int = 0o666) -> _dbm: ... 
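The dbm hunks above note that `open()` can return any of the three backends (dumb, gnu, ndbm) behind the shared `_Database` interface, whose `__enter__` the stubs now type as returning `Self`. A usage sketch; the filename is a made-up placeholder:

```python
import dbm

# flag="c" creates the database if it does not exist ("example.db" is hypothetical).
# Context-manager support comes from _Database.__enter__, now typed as -> Self.
with dbm.open("example.db", flag="c") as db:
    db[b"greeting"] = b"hello"
    assert db[b"greeting"] == b"hello"
```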
diff --git a/mypy/typeshed/stdlib/difflib.pyi b/mypy/typeshed/stdlib/difflib.pyi index 854a53d433ae..310519602695 100644 --- a/mypy/typeshed/stdlib/difflib.pyi +++ b/mypy/typeshed/stdlib/difflib.pyi @@ -29,28 +29,28 @@ class Match(NamedTuple): class SequenceMatcher(Generic[_T]): @overload - def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + def __init__(self, isjunk: Callable[[_T], bool] | None, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload - def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = ...) -> None: ... + def __init__(self, *, a: Sequence[_T], b: Sequence[_T], autojunk: bool = True) -> None: ... @overload def __init__( self: SequenceMatcher[str], - isjunk: Callable[[str], bool] | None = ..., - a: Sequence[str] = ..., - b: Sequence[str] = ..., - autojunk: bool = ..., + isjunk: Callable[[str], bool] | None = None, + a: Sequence[str] = "", + b: Sequence[str] = "", + autojunk: bool = True, ) -> None: ... def set_seqs(self, a: Sequence[_T], b: Sequence[_T]) -> None: ... def set_seq1(self, a: Sequence[_T]) -> None: ... def set_seq2(self, b: Sequence[_T]) -> None: ... if sys.version_info >= (3, 9): - def find_longest_match(self, alo: int = ..., ahi: int | None = ..., blo: int = ..., bhi: int | None = ...) -> Match: ... + def find_longest_match(self, alo: int = 0, ahi: int | None = None, blo: int = 0, bhi: int | None = None) -> Match: ... else: def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... def get_matching_blocks(self) -> list[Match]: ... def get_opcodes(self) -> list[tuple[str, int, int, int, int]]: ... - def get_grouped_opcodes(self, n: int = ...) -> Iterable[list[tuple[str, int, int, int, int]]]: ... + def get_grouped_opcodes(self, n: int = 3) -> Iterable[list[tuple[str, int, int, int, int]]]: ... def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... @@ -59,80 +59,83 @@ class SequenceMatcher(Generic[_T]): # mypy thinks the signatures of the overloads overlap, but the types still work fine @overload -def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = ..., cutoff: float = ...) -> list[AnyStr]: ... # type: ignore[misc] +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = 3, cutoff: float = 0.6) -> list[AnyStr]: ... # type: ignore[misc] @overload def get_close_matches( - word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = ..., cutoff: float = ... + word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = 3, cutoff: float = 0.6 ) -> list[Sequence[_T]]: ... class Differ: - def __init__(self, linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ...) -> None: ... + def __init__(self, linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = None) -> None: ... def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... def IS_LINE_JUNK(line: str, pat: Any = ...) -> bool: ... # pat is undocumented -def IS_CHARACTER_JUNK(ch: str, ws: str = ...) -> bool: ... # ws is undocumented +def IS_CHARACTER_JUNK(ch: str, ws: str = " \t") -> bool: ... 
# ws is undocumented def unified_diff( a: Sequence[str], b: Sequence[str], - fromfile: str = ..., - tofile: str = ..., - fromfiledate: str = ..., - tofiledate: str = ..., - n: int = ..., - lineterm: str = ..., + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", ) -> Iterator[str]: ... def context_diff( a: Sequence[str], b: Sequence[str], - fromfile: str = ..., - tofile: str = ..., - fromfiledate: str = ..., - tofiledate: str = ..., - n: int = ..., - lineterm: str = ..., + fromfile: str = "", + tofile: str = "", + fromfiledate: str = "", + tofiledate: str = "", + n: int = 3, + lineterm: str = "\n", ) -> Iterator[str]: ... def ndiff( - a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ... + a: Sequence[str], + b: Sequence[str], + linejunk: Callable[[str], bool] | None = None, + charjunk: Callable[[str], bool] | None = ..., ) -> Iterator[str]: ... class HtmlDiff: def __init__( self, - tabsize: int = ..., - wrapcolumn: int | None = ..., - linejunk: Callable[[str], bool] | None = ..., + tabsize: int = 8, + wrapcolumn: int | None = None, + linejunk: Callable[[str], bool] | None = None, charjunk: Callable[[str], bool] | None = ..., ) -> None: ... def make_file( self, fromlines: Sequence[str], tolines: Sequence[str], - fromdesc: str = ..., - todesc: str = ..., - context: bool = ..., - numlines: int = ..., + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, *, - charset: str = ..., + charset: str = "utf-8", ) -> str: ... def make_table( self, fromlines: Sequence[str], tolines: Sequence[str], - fromdesc: str = ..., - todesc: str = ..., - context: bool = ..., - numlines: int = ..., + fromdesc: str = "", + todesc: str = "", + context: bool = False, + numlines: int = 5, ) -> str: ... def restore(delta: Iterable[str], which: int) -> Iterator[str]: ... def diff_bytes( dfunc: Callable[[Sequence[str], Sequence[str], str, str, str, str, int, str], Iterator[str]], - a: Sequence[bytes], - b: Sequence[bytes], - fromfile: bytes = ..., - tofile: bytes = ..., - fromfiledate: bytes = ..., - tofiledate: bytes = ..., - n: int = ..., - lineterm: bytes = ..., + a: Iterable[bytes | bytearray], + b: Iterable[bytes | bytearray], + fromfile: bytes | bytearray = b"", + tofile: bytes | bytearray = b"", + fromfiledate: bytes | bytearray = b"", + tofiledate: bytes | bytearray = b"", + n: int = 3, + lineterm: bytes | bytearray = b"\n", ) -> Iterator[bytes]: ... 
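The difflib.pyi hunk above replaces `...` placeholders with the actual runtime defaults (`n=3` context lines, `lineterm="\n"`, `cutoff=0.6`, and so on). A short sketch relying on those defaults:

```python
import difflib

old = ["one\n", "two\n", "three\n"]
new = ["one\n", "two!\n", "three\n"]

# fromfile/tofile default to "", n defaults to 3, lineterm defaults to "\n".
for line in difflib.unified_diff(old, new, fromfile="old.txt", tofile="new.txt"):
    print(line, end="")
```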
diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index dd31d981071f..ac0c5356f5f9 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -1,10 +1,9 @@ import sys import types -from _typeshed import Self from collections.abc import Callable, Iterator from opcode import * # `dis` re-exports it as a part of public API from typing import IO, Any, NamedTuple -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = [ "code_info", @@ -37,7 +36,6 @@ __all__ = [ # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeType | type | Callable[..., Any] -_HaveCodeOrStringType: TypeAlias = _HaveCodeType | str | bytes if sys.version_info >= (3, 11): class Positions(NamedTuple): @@ -75,23 +73,21 @@ class Bytecode: if sys.version_info >= (3, 11): def __init__( self, - x: _HaveCodeOrStringType, + x: _HaveCodeType | str, *, - first_line: int | None = ..., - current_offset: int | None = ..., - show_caches: bool = ..., - adaptive: bool = ..., + first_line: int | None = None, + current_offset: int | None = None, + show_caches: bool = False, + adaptive: bool = False, ) -> None: ... @classmethod - def from_traceback( - cls: type[Self], tb: types.TracebackType, *, show_caches: bool = ..., adaptive: bool = ... - ) -> Self: ... + def from_traceback(cls, tb: types.TracebackType, *, show_caches: bool = False, adaptive: bool = False) -> Self: ... else: def __init__( - self, x: _HaveCodeOrStringType, *, first_line: int | None = ..., current_offset: int | None = ... + self, x: _HaveCodeType | str, *, first_line: int | None = None, current_offset: int | None = None ) -> None: ... @classmethod - def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... + def from_traceback(cls, tb: types.TracebackType) -> Self: ... def __iter__(self) -> Iterator[Instruction]: ... def info(self) -> str: ... @@ -102,39 +98,41 @@ COMPILER_FLAG_NAMES: dict[int, str] def findlabels(code: _HaveCodeType) -> list[int]: ... def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... def pretty_flags(flags: int) -> str: ... -def code_info(x: _HaveCodeOrStringType) -> str: ... +def code_info(x: _HaveCodeType | str) -> str: ... if sys.version_info >= (3, 11): def dis( - x: _HaveCodeOrStringType | None = ..., + x: _HaveCodeType | str | bytes | bytearray | None = None, *, - file: IO[str] | None = ..., - depth: int | None = ..., - show_caches: bool = ..., - adaptive: bool = ..., + file: IO[str] | None = None, + depth: int | None = None, + show_caches: bool = False, + adaptive: bool = False, ) -> None: ... else: - def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ..., depth: int | None = ...) -> None: ... + def dis( + x: _HaveCodeType | str | bytes | bytearray | None = None, *, file: IO[str] | None = None, depth: int | None = None + ) -> None: ... if sys.version_info >= (3, 11): def disassemble( - co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def disco( - co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... 
+ co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def distb( - tb: types.TracebackType | None = ..., *, file: IO[str] | None = ..., show_caches: bool = ..., adaptive: bool = ... + tb: types.TracebackType | None = None, *, file: IO[str] | None = None, show_caches: bool = False, adaptive: bool = False ) -> None: ... def get_instructions( - x: _HaveCodeType, *, first_line: int | None = ..., show_caches: bool = ..., adaptive: bool = ... + x: _HaveCodeType, *, first_line: int | None = None, show_caches: bool = False, adaptive: bool = False ) -> Iterator[Instruction]: ... else: - def disassemble(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... - def disco(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... - def distb(tb: types.TracebackType | None = ..., *, file: IO[str] | None = ...) -> None: ... - def get_instructions(x: _HaveCodeType, *, first_line: int | None = ...) -> Iterator[Instruction]: ... + def disassemble(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def disco(co: _HaveCodeType, lasti: int = -1, *, file: IO[str] | None = None) -> None: ... + def distb(tb: types.TracebackType | None = None, *, file: IO[str] | None = None) -> None: ... + def get_instructions(x: _HaveCodeType, *, first_line: int | None = None) -> Iterator[Instruction]: ... -def show_code(co: _HaveCodeType, *, file: IO[str] | None = ...) -> None: ... +def show_code(co: _HaveCodeType, *, file: IO[str] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/archive_util.pyi b/mypy/typeshed/stdlib/distutils/archive_util.pyi index 38458fc0e003..a8947ce35c60 100644 --- a/mypy/typeshed/stdlib/distutils/archive_util.pyi +++ b/mypy/typeshed/stdlib/distutils/archive_util.pyi @@ -1,20 +1,20 @@ def make_archive( base_name: str, format: str, - root_dir: str | None = ..., - base_dir: str | None = ..., - verbose: int = ..., - dry_run: int = ..., - owner: str | None = ..., - group: str | None = ..., + root_dir: str | None = None, + base_dir: str | None = None, + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, ) -> str: ... def make_tarball( base_name: str, base_dir: str, - compress: str | None = ..., - verbose: int = ..., - dry_run: int = ..., - owner: str | None = ..., - group: str | None = ..., + compress: str | None = "gzip", + verbose: int = 0, + dry_run: int = 0, + owner: str | None = None, + group: str | None = None, ) -> str: ... -def make_zipfile(base_name: str, base_dir: str, verbose: int = ..., dry_run: int = ...) -> str: ... +def make_zipfile(base_name: str, base_dir: str, verbose: int = 0, dry_run: int = 0) -> str: ... diff --git a/mypy/typeshed/stdlib/distutils/ccompiler.pyi b/mypy/typeshed/stdlib/distutils/ccompiler.pyi index 5b92c5f5c42e..e7277aa3f9c4 100644 --- a/mypy/typeshed/stdlib/distutils/ccompiler.pyi +++ b/mypy/typeshed/stdlib/distutils/ccompiler.pyi @@ -1,16 +1,16 @@ from collections.abc import Callable -from typing import Any, Union +from typing import Any from typing_extensions import TypeAlias -_Macro: TypeAlias = Union[tuple[str], tuple[str, str | None]] +_Macro: TypeAlias = tuple[str] | tuple[str, str | None] def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] ) -> list[str]: ... def gen_preprocess_options(macros: list[_Macro], include_dirs: list[str]) -> list[str]: ... 
-def get_default_compiler(osname: str | None = ..., platform: str | None = ...) -> str: ... +def get_default_compiler(osname: str | None = None, platform: str | None = None) -> str: ... def new_compiler( - plat: str | None = ..., compiler: str | None = ..., verbose: int = ..., dry_run: int = ..., force: int = ... + plat: str | None = None, compiler: str | None = None, verbose: int = 0, dry_run: int = 0, force: int = 0 ) -> CCompiler: ... def show_compilers() -> None: ... @@ -25,7 +25,7 @@ class CCompiler: library_dirs: list[str] runtime_library_dirs: list[str] objects: list[str] - def __init__(self, verbose: int = ..., dry_run: int = ..., force: int = ...) -> None: ... + def __init__(self, verbose: int = 0, dry_run: int = 0, force: int = 0) -> None: ... def add_include_dir(self, dir: str) -> None: ... def set_include_dirs(self, dirs: list[str]) -> None: ... def add_library(self, libname: str) -> None: ... @@ -34,7 +34,7 @@ class CCompiler: def set_library_dirs(self, dirs: list[str]) -> None: ... def add_runtime_library_dir(self, dir: str) -> None: ... def set_runtime_library_dirs(self, dirs: list[str]) -> None: ... - def define_macro(self, name: str, value: str | None = ...) -> None: ... + def define_macro(self, name: str, value: str | None = None) -> None: ... def undefine_macro(self, name: str) -> None: ... def add_link_object(self, object: str) -> None: ... def set_link_objects(self, objects: list[str]) -> None: ... @@ -43,10 +43,10 @@ class CCompiler: def has_function( self, funcname: str, - includes: list[str] | None = ..., - include_dirs: list[str] | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., + includes: list[str] | None = None, + include_dirs: list[str] | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, ) -> bool: ... def library_dir_option(self, dir: str) -> str: ... def library_option(self, lib: str) -> str: ... @@ -55,98 +55,98 @@ class CCompiler: def compile( self, sources: list[str], - output_dir: str | None = ..., - macros: _Macro | None = ..., - include_dirs: list[str] | None = ..., + output_dir: str | None = None, + macros: _Macro | None = None, + include_dirs: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - depends: list[str] | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + depends: list[str] | None = None, ) -> list[str]: ... def create_static_lib( self, objects: list[str], output_libname: str, - output_dir: str | None = ..., + output_dir: str | None = None, debug: bool = ..., - target_lang: str | None = ..., + target_lang: str | None = None, ) -> None: ... def link( self, target_desc: str, objects: list[str], output_filename: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... 
def link_executable( self, objects: list[str], output_progname: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + target_lang: str | None = None, ) -> None: ... def link_shared_lib( self, objects: list[str], output_libname: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... def link_shared_object( self, objects: list[str], output_filename: str, - output_dir: str | None = ..., - libraries: list[str] | None = ..., - library_dirs: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - export_symbols: list[str] | None = ..., + output_dir: str | None = None, + libraries: list[str] | None = None, + library_dirs: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + export_symbols: list[str] | None = None, debug: bool = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., - build_temp: str | None = ..., - target_lang: str | None = ..., + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, + build_temp: str | None = None, + target_lang: str | None = None, ) -> None: ... def preprocess( self, source: str, - output_file: str | None = ..., - macros: list[_Macro] | None = ..., - include_dirs: list[str] | None = ..., - extra_preargs: list[str] | None = ..., - extra_postargs: list[str] | None = ..., + output_file: str | None = None, + macros: list[_Macro] | None = None, + include_dirs: list[str] | None = None, + extra_preargs: list[str] | None = None, + extra_postargs: list[str] | None = None, ) -> None: ... - def executable_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... - def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... - def object_filenames(self, source_filenames: list[str], strip_dir: int = ..., output_dir: str = ...) -> list[str]: ... - def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... - def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., level: int = ...) -> None: ... + def executable_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... + def library_filename(self, libname: str, lib_type: str = "static", strip_dir: int = 0, output_dir: str = "") -> str: ... 
+ def object_filenames(self, source_filenames: list[str], strip_dir: int = 0, output_dir: str = "") -> list[str]: ... + def shared_object_filename(self, basename: str, strip_dir: int = 0, output_dir: str = "") -> str: ... + def execute(self, func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, level: int = 1) -> None: ... def spawn(self, cmd: list[str]) -> None: ... - def mkpath(self, name: str, mode: int = ...) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... def move_file(self, src: str, dst: str) -> str: ... - def announce(self, msg: str, level: int = ...) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... def warn(self, msg: str) -> None: ... def debug_print(self, msg: str) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/cmd.pyi b/mypy/typeshed/stdlib/distutils/cmd.pyi index e706bdbc5802..d9ffee9cb832 100644 --- a/mypy/typeshed/stdlib/distutils/cmd.pyi +++ b/mypy/typeshed/stdlib/distutils/cmd.pyi @@ -12,49 +12,43 @@ class Command: def finalize_options(self) -> None: ... @abstractmethod def run(self) -> None: ... - def announce(self, msg: str, level: int = ...) -> None: ... + def announce(self, msg: str, level: int = 1) -> None: ... def debug_print(self, msg: str) -> None: ... - def ensure_string(self, option: str, default: str | None = ...) -> None: ... + def ensure_string(self, option: str, default: str | None = None) -> None: ... def ensure_string_list(self, option: str | list[str]) -> None: ... def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... - def get_finalized_command(self, command: str, create: int = ...) -> Command: ... - def reinitialize_command(self, command: Command | str, reinit_subcommands: int = ...) -> Command: ... + def get_finalized_command(self, command: str, create: int = 1) -> Command: ... + def reinitialize_command(self, command: Command | str, reinit_subcommands: int = 0) -> Command: ... def run_command(self, command: str) -> None: ... def get_sub_commands(self) -> list[str]: ... def warn(self, msg: str) -> None: ... - def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = ..., level: int = ...) -> None: ... - def mkpath(self, name: str, mode: int = ...) -> None: ... + def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ... + def mkpath(self, name: str, mode: int = 0o777) -> None: ... def copy_file( - self, - infile: str, - outfile: str, - preserve_mode: int = ..., - preserve_times: int = ..., - link: str | None = ..., - level: Any = ..., + self, infile: str, outfile: str, preserve_mode: int = 1, preserve_times: int = 1, link: str | None = None, level: Any = 1 ) -> tuple[str, bool]: ... # level is not used def copy_tree( self, infile: str, outfile: str, - preserve_mode: int = ..., - preserve_times: int = ..., - preserve_symlinks: int = ..., - level: Any = ..., + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + level: Any = 1, ) -> list[str]: ... # level is not used - def move_file(self, src: str, dst: str, level: Any = ...) -> str: ... # level is not used - def spawn(self, cmd: Iterable[str], search_path: int = ..., level: Any = ...) -> None: ... # level is not used + def move_file(self, src: str, dst: str, level: Any = 1) -> str: ... 
# level is not used + def spawn(self, cmd: Iterable[str], search_path: int = 1, level: Any = 1) -> None: ... # level is not used def make_archive( self, base_name: str, format: str, - root_dir: str | None = ..., - base_dir: str | None = ..., - owner: str | None = ..., - group: str | None = ..., + root_dir: str | None = None, + base_dir: str | None = None, + owner: str | None = None, + group: str | None = None, ) -> str: ... def make_file( self, @@ -62,7 +56,7 @@ class Command: outfile: str, func: Callable[..., object], args: list[Any], - exec_msg: str | None = ..., - skip_msg: str | None = ..., - level: Any = ..., + exec_msg: str | None = None, + skip_msg: str | None = None, + level: Any = 1, ) -> None: ... # level is not used diff --git a/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi b/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi index 66202e841d3c..fa98e86d592a 100644 --- a/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi +++ b/mypy/typeshed/stdlib/distutils/command/bdist_msi.pyi @@ -9,9 +9,9 @@ if sys.platform == "win32": class PyDialog(Dialog): def __init__(self, *args, **kw) -> None: ... def title(self, title) -> None: ... - def back(self, title, next, name: str = ..., active: int = ...): ... - def cancel(self, title, next, name: str = ..., active: int = ...): ... - def next(self, title, next, name: str = ..., active: int = ...): ... + def back(self, title, next, name: str = "Back", active: int = 1): ... + def cancel(self, title, next, name: str = "Cancel", active: int = 1): ... + def next(self, title, next, name: str = "Next", active: int = 1): ... def xbutton(self, name, title, next, xpos): ... class bdist_msi(Command): diff --git a/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi b/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi index 1091fb278493..8491d3126200 100644 --- a/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi +++ b/mypy/typeshed/stdlib/distutils/command/bdist_wininst.pyi @@ -11,6 +11,6 @@ class bdist_wininst(Command): def finalize_options(self) -> None: ... def run(self) -> None: ... def get_inidata(self) -> str: ... - def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = ...) -> None: ... + def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = None) -> None: ... def get_installer_filename(self, fullname: str) -> str: ... def get_exe_bytes(self) -> bytes: ... diff --git a/mypy/typeshed/stdlib/distutils/command/build_py.pyi b/mypy/typeshed/stdlib/distutils/command/build_py.pyi index 3c6e022c2a10..ca4e4ed7e797 100644 --- a/mypy/typeshed/stdlib/distutils/command/build_py.pyi +++ b/mypy/typeshed/stdlib/distutils/command/build_py.pyi @@ -32,7 +32,7 @@ class build_py(Command): def find_all_modules(self): ... def get_source_files(self): ... def get_module_outfile(self, build_dir, package, module): ... - def get_outputs(self, include_bytecode: int = ...): ... + def get_outputs(self, include_bytecode: int = 1): ... def build_module(self, module, module_file, package): ... def build_modules(self) -> None: ... def build_packages(self) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/command/check.pyi b/mypy/typeshed/stdlib/distutils/command/check.pyi index cdbe40fff71d..9cbcc6c87f21 100644 --- a/mypy/typeshed/stdlib/distutils/command/check.pyi +++ b/mypy/typeshed/stdlib/distutils/command/check.pyi @@ -6,6 +6,8 @@ from ..cmd import Command _Reporter: TypeAlias = Any # really docutils.utils.Reporter # Only defined if docutils is installed. 
+# Depends on a third-party stub. Since distutils is deprecated anyway, +# it's easier to just suppress the "any subclassing" error. class SilentReporter(_Reporter): messages: Any def __init__( diff --git a/mypy/typeshed/stdlib/distutils/command/config.pyi b/mypy/typeshed/stdlib/distutils/command/config.pyi index 03466ca72985..81fdf76b2b59 100644 --- a/mypy/typeshed/stdlib/distutils/command/config.pyi +++ b/mypy/typeshed/stdlib/distutils/command/config.pyi @@ -24,60 +24,60 @@ class config(Command): def run(self) -> None: ... def try_cpp( self, - body: str | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - lang: str = ..., + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def search_cpp( self, pattern: Pattern[str] | str, - body: str | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - lang: str = ..., + body: str | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def try_compile( - self, body: str, headers: Sequence[str] | None = ..., include_dirs: Sequence[str] | None = ..., lang: str = ... + self, body: str, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: ... def try_link( self, body: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - lang: str = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def try_run( self, body: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - lang: str = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + lang: str = "c", ) -> bool: ... def check_func( self, func: str, - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., - libraries: Sequence[str] | None = ..., - library_dirs: Sequence[str] | None = ..., - decl: int = ..., - call: int = ..., + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, + libraries: Sequence[str] | None = None, + library_dirs: Sequence[str] | None = None, + decl: int = 0, + call: int = 0, ) -> bool: ... def check_lib( self, library: str, - library_dirs: Sequence[str] | None = ..., - headers: Sequence[str] | None = ..., - include_dirs: Sequence[str] | None = ..., + library_dirs: Sequence[str] | None = None, + headers: Sequence[str] | None = None, + include_dirs: Sequence[str] | None = None, other_libraries: list[str] = ..., ) -> bool: ... def check_header( - self, header: str, include_dirs: Sequence[str] | None = ..., library_dirs: Sequence[str] | None = ..., lang: str = ... + self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" ) -> bool: ... -def dump_file(filename: str, head: Any | None = ...) -> None: ... +def dump_file(filename: str, head: Any | None = None) -> None: ... 
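Looking back at the dis.pyi hunk earlier in this patch: `dis()` and related helpers now accept raw bytecode as `bytes | bytearray` in addition to objects that carry a code object. A quick sketch of both call forms:

```python
import dis

def add(x: int, y: int) -> int:
    return x + y

dis.dis(add)                     # an object with a __code__ attribute
dis.dis(add.__code__.co_code)    # raw bytecode bytes, per the widened signature
```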
diff --git a/mypy/typeshed/stdlib/distutils/command/register.pyi b/mypy/typeshed/stdlib/distutils/command/register.pyi index a1a7a45fb3d7..f88b94113ff4 100644 --- a/mypy/typeshed/stdlib/distutils/command/register.pyi +++ b/mypy/typeshed/stdlib/distutils/command/register.pyi @@ -15,4 +15,4 @@ class register(PyPIRCCommand): def verify_metadata(self) -> None: ... def send_metadata(self) -> None: ... def build_post_data(self, action): ... - def post_to_server(self, data, auth: Any | None = ...): ... + def post_to_server(self, data, auth: Any | None = None): ... diff --git a/mypy/typeshed/stdlib/distutils/core.pyi b/mypy/typeshed/stdlib/distutils/core.pyi index 199a4d70a953..56081f921378 100644 --- a/mypy/typeshed/stdlib/distutils/core.pyi +++ b/mypy/typeshed/stdlib/distutils/core.pyi @@ -46,4 +46,4 @@ def setup( fullname: str = ..., **attrs: Any, ) -> None: ... -def run_setup(script_name: str, script_args: list[str] | None = ..., stop_after: str = ...) -> Distribution: ... +def run_setup(script_name: str, script_args: list[str] | None = None, stop_after: str = "run") -> Distribution: ... diff --git a/mypy/typeshed/stdlib/distutils/dep_util.pyi b/mypy/typeshed/stdlib/distutils/dep_util.pyi index 929d6ffd0c81..096ce19d4859 100644 --- a/mypy/typeshed/stdlib/distutils/dep_util.pyi +++ b/mypy/typeshed/stdlib/distutils/dep_util.pyi @@ -1,3 +1,3 @@ def newer(source: str, target: str) -> bool: ... def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ... -def newer_group(sources: list[str], target: str, missing: str = ...) -> bool: ... +def newer_group(sources: list[str], target: str, missing: str = "error") -> bool: ... diff --git a/mypy/typeshed/stdlib/distutils/dir_util.pyi b/mypy/typeshed/stdlib/distutils/dir_util.pyi index ffe5ff1cfbd4..2324a2d50caa 100644 --- a/mypy/typeshed/stdlib/distutils/dir_util.pyi +++ b/mypy/typeshed/stdlib/distutils/dir_util.pyi @@ -1,13 +1,13 @@ -def mkpath(name: str, mode: int = ..., verbose: int = ..., dry_run: int = ...) -> list[str]: ... -def create_tree(base_dir: str, files: list[str], mode: int = ..., verbose: int = ..., dry_run: int = ...) -> None: ... +def mkpath(name: str, mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> list[str]: ... +def create_tree(base_dir: str, files: list[str], mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> None: ... def copy_tree( src: str, dst: str, - preserve_mode: int = ..., - preserve_times: int = ..., - preserve_symlinks: int = ..., - update: int = ..., - verbose: int = ..., - dry_run: int = ..., + preserve_mode: int = 1, + preserve_times: int = 1, + preserve_symlinks: int = 0, + update: int = 0, + verbose: int = 1, + dry_run: int = 0, ) -> list[str]: ... -def remove_tree(directory: str, verbose: int = ..., dry_run: int = ...) -> None: ... +def remove_tree(directory: str, verbose: int = 1, dry_run: int = 0) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/dist.pyi b/mypy/typeshed/stdlib/distutils/dist.pyi index ef47e4e4d15a..b411324c4ce6 100644 --- a/mypy/typeshed/stdlib/distutils/dist.pyi +++ b/mypy/typeshed/stdlib/distutils/dist.pyi @@ -1,10 +1,10 @@ -from _typeshed import StrOrBytesPath, SupportsWrite +from _typeshed import FileDescriptorOrPath, SupportsWrite from collections.abc import Iterable, Mapping from distutils.cmd import Command from typing import IO, Any class DistributionMetadata: - def __init__(self, path: int | StrOrBytesPath | None = ...) -> None: ... + def __init__(self, path: FileDescriptorOrPath | None = None) -> None: ... 
name: str | None version: str | None author: str | None @@ -53,7 +53,7 @@ class DistributionMetadata: class Distribution: cmdclass: dict[str, type[Command]] metadata: DistributionMetadata - def __init__(self, attrs: Mapping[str, Any] | None = ...) -> None: ... + def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ... def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... - def parse_config_files(self, filenames: Iterable[str] | None = ...) -> None: ... + def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... diff --git a/mypy/typeshed/stdlib/distutils/extension.pyi b/mypy/typeshed/stdlib/distutils/extension.pyi index 5639f44a6d03..789bbf6ec3d1 100644 --- a/mypy/typeshed/stdlib/distutils/extension.pyi +++ b/mypy/typeshed/stdlib/distutils/extension.pyi @@ -19,18 +19,18 @@ class Extension: self, name: str, sources: list[str], - include_dirs: list[str] | None = ..., - define_macros: list[tuple[str, str | None]] | None = ..., - undef_macros: list[str] | None = ..., - library_dirs: list[str] | None = ..., - libraries: list[str] | None = ..., - runtime_library_dirs: list[str] | None = ..., - extra_objects: list[str] | None = ..., - extra_compile_args: list[str] | None = ..., - extra_link_args: list[str] | None = ..., - export_symbols: list[str] | None = ..., - swig_opts: list[str] | None = ..., - depends: list[str] | None = ..., - language: str | None = ..., - optional: bool | None = ..., + include_dirs: list[str] | None = None, + define_macros: list[tuple[str, str | None]] | None = None, + undef_macros: list[str] | None = None, + library_dirs: list[str] | None = None, + libraries: list[str] | None = None, + runtime_library_dirs: list[str] | None = None, + extra_objects: list[str] | None = None, + extra_compile_args: list[str] | None = None, + extra_link_args: list[str] | None = None, + export_symbols: list[str] | None = None, + swig_opts: list[str] | None = None, + depends: list[str] | None = None, + language: str | None = None, + optional: bool | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi index 6a7124bd15ad..153583be6b5d 100644 --- a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -11,14 +11,14 @@ def fancy_getopt( def wrap_text(text: str, width: int) -> list[str]: ... class FancyGetopt: - def __init__(self, option_table: list[_Option] | None = ...) -> None: ... + def __init__(self, option_table: list[_Option] | None = None) -> None: ... # TODO kinda wrong, `getopt(object=object())` is invalid @overload - def getopt(self, args: list[str] | None = ...) -> _GR: ... + def getopt(self, args: list[str] | None = None) -> _GR: ... @overload def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... def get_option_order(self) -> list[tuple[str, str]]: ... - def generate_help(self, header: str | None = ...) -> list[str]: ... + def generate_help(self, header: str | None = None) -> list[str]: ... class OptionDummy: def __init__(self, options: Iterable[str] = ...) -> None: ... 
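In the distutils/extension.pyi hunk above, every optional `Extension` argument now defaults to `None`, matching the runtime signature. A minimal sketch; the module name and source file are placeholders:

```python
from distutils.extension import Extension

# Only name and sources are required; (name, None) in define_macros means
# "#define NDEBUG" with no value, as the tuple[str, str | None] type allows.
ext = Extension("spam", sources=["spam.c"], define_macros=[("NDEBUG", None)])
```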
diff --git a/mypy/typeshed/stdlib/distutils/file_util.pyi b/mypy/typeshed/stdlib/distutils/file_util.pyi index b3127841bce8..a97dfca60007 100644 --- a/mypy/typeshed/stdlib/distutils/file_util.pyi +++ b/mypy/typeshed/stdlib/distutils/file_util.pyi @@ -6,7 +6,7 @@ def copy_file( preserve_mode: bool = ..., preserve_times: bool = ..., update: bool = ..., - link: str | None = ..., + link: str | None = None, verbose: bool = ..., dry_run: bool = ..., ) -> tuple[str, str]: ... diff --git a/mypy/typeshed/stdlib/distutils/filelist.pyi b/mypy/typeshed/stdlib/distutils/filelist.pyi index 1cfdcf08dca9..bea48ac16ac5 100644 --- a/mypy/typeshed/stdlib/distutils/filelist.pyi +++ b/mypy/typeshed/stdlib/distutils/filelist.pyi @@ -7,9 +7,9 @@ from typing_extensions import Literal class FileList: allfiles: Iterable[str] | None files: list[str] - def __init__(self, warn: None = ..., debug_print: None = ...) -> None: ... + def __init__(self, warn: None = None, debug_print: None = None) -> None: ... def set_allfiles(self, allfiles: Iterable[str]) -> None: ... - def findall(self, dir: str = ...) -> None: ... + def findall(self, dir: str = ".") -> None: ... def debug_print(self, msg: str) -> None: ... def append(self, item: str) -> None: ... def extend(self, items: Iterable[str]) -> None: ... @@ -18,34 +18,34 @@ class FileList: def process_template_line(self, line: str) -> None: ... @overload def include_pattern( - self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 ) -> bool: ... @overload - def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + def include_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def include_pattern( - self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> bool: ... @overload def exclude_pattern( - self, pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[0, False] = ... + self, pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[0, False] = 0 ) -> bool: ... @overload - def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> bool: ... + def exclude_pattern(self, pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> bool: ... @overload def exclude_pattern( - self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + self, pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> bool: ... -def findall(dir: str = ...) -> list[str]: ... +def findall(dir: str = ".") -> list[str]: ... def glob_to_re(pattern: str) -> str: ... @overload def translate_pattern( - pattern: str, anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: Literal[False, 0] = ... + pattern: str, anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: Literal[False, 0] = 0 ) -> Pattern[str]: ... @overload -def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1] = ...) -> Pattern[str]: ... 
+def translate_pattern(pattern: str | Pattern[str], *, is_regex: Literal[True, 1]) -> Pattern[str]: ... @overload def translate_pattern( - pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = ..., prefix: str | None = ..., is_regex: int = ... + pattern: str | Pattern[str], anchor: bool | Literal[0, 1] = 1, prefix: str | None = None, is_regex: int = 0 ) -> Pattern[str]: ... diff --git a/mypy/typeshed/stdlib/distutils/log.pyi b/mypy/typeshed/stdlib/distutils/log.pyi index 549b569e7356..14ed8d8aefa8 100644 --- a/mypy/typeshed/stdlib/distutils/log.pyi +++ b/mypy/typeshed/stdlib/distutils/log.pyi @@ -7,7 +7,7 @@ ERROR: int FATAL: int class Log: - def __init__(self, threshold: int = ...) -> None: ... + def __init__(self, threshold: int = 3) -> None: ... def log(self, level: int, msg: str, *args: Any) -> None: ... def debug(self, msg: str, *args: Any) -> None: ... def info(self, msg: str, *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/spawn.pyi b/mypy/typeshed/stdlib/distutils/spawn.pyi index dda05ad7e85a..a8a2c4140b2d 100644 --- a/mypy/typeshed/stdlib/distutils/spawn.pyi +++ b/mypy/typeshed/stdlib/distutils/spawn.pyi @@ -1,2 +1,2 @@ def spawn(cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ... -def find_executable(executable: str, path: str | None = ...) -> str | None: ... +def find_executable(executable: str, path: str | None = None) -> str | None: ... diff --git a/mypy/typeshed/stdlib/distutils/sysconfig.pyi b/mypy/typeshed/stdlib/distutils/sysconfig.pyi index bf7db9c8f06b..8b291e8b94a5 100644 --- a/mypy/typeshed/stdlib/distutils/sysconfig.pyi +++ b/mypy/typeshed/stdlib/distutils/sysconfig.pyi @@ -8,6 +8,6 @@ def get_config_var(name: str) -> int | str | None: ... def get_config_vars(*args: str) -> Mapping[str, int | str]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... -def get_python_inc(plat_specific: bool = ..., prefix: str | None = ...) -> str: ... -def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = ...) -> str: ... +def get_python_inc(plat_specific: bool = ..., prefix: str | None = None) -> str: ... +def get_python_lib(plat_specific: bool = ..., standard_lib: bool = ..., prefix: str | None = None) -> str: ... def customize_compiler(compiler: CCompiler) -> None: ... diff --git a/mypy/typeshed/stdlib/distutils/text_file.pyi b/mypy/typeshed/stdlib/distutils/text_file.pyi index ace642e027cf..4a6cf1db77c6 100644 --- a/mypy/typeshed/stdlib/distutils/text_file.pyi +++ b/mypy/typeshed/stdlib/distutils/text_file.pyi @@ -3,8 +3,8 @@ from typing import IO class TextFile: def __init__( self, - filename: str | None = ..., - file: IO[str] | None = ..., + filename: str | None = None, + file: IO[str] | None = None, *, strip_comments: bool = ..., lstrip_ws: bool = ..., @@ -15,7 +15,7 @@ class TextFile: ) -> None: ... def open(self, filename: str) -> None: ... def close(self) -> None: ... - def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = ...) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = None) -> None: ... def readline(self) -> str | None: ... def readlines(self) -> list[str]: ... def unreadline(self, line: str) -> str: ... 
diff --git a/mypy/typeshed/stdlib/distutils/util.pyi b/mypy/typeshed/stdlib/distutils/util.pyi index da8d66063536..f03844307581 100644 --- a/mypy/typeshed/stdlib/distutils/util.pyi +++ b/mypy/typeshed/stdlib/distutils/util.pyi @@ -1,4 +1,4 @@ -from _typeshed import StrPath +from _typeshed import StrPath, Unused from collections.abc import Callable, Container, Iterable, Mapping from typing import Any from typing_extensions import Literal @@ -10,33 +10,33 @@ def check_environ() -> None: ... def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... def split_quoted(s: str) -> list[str]: ... def execute( - func: Callable[..., object], args: tuple[Any, ...], msg: str | None = ..., verbose: bool = ..., dry_run: bool = ... + func: Callable[..., object], args: tuple[Any, ...], msg: str | None = None, verbose: bool = ..., dry_run: bool = ... ) -> None: ... def strtobool(val: str) -> Literal[0, 1]: ... def byte_compile( py_files: list[str], - optimize: int = ..., + optimize: int = 0, force: bool = ..., - prefix: str | None = ..., - base_dir: str | None = ..., + prefix: str | None = None, + base_dir: str | None = None, verbose: bool = ..., dry_run: bool = ..., - direct: bool | None = ..., + direct: bool | None = None, ) -> None: ... def rfc822_escape(header: str) -> str: ... def run_2to3( files: Iterable[str], - fixer_names: Iterable[str] | None = ..., - options: Mapping[str, Any] | None = ..., - explicit: Container[str] | None = ..., # unused + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Unused = None, ) -> None: ... def copydir_run_2to3( src: StrPath, dest: StrPath, - template: str | None = ..., - fixer_names: Iterable[str] | None = ..., - options: Mapping[str, Any] | None = ..., - explicit: Container[str] | None = ..., + template: str | None = None, + fixer_names: Iterable[str] | None = None, + options: Mapping[str, Any] | None = None, + explicit: Container[str] | None = None, ) -> list[str]: ... class Mixin2to3: diff --git a/mypy/typeshed/stdlib/distutils/version.pyi b/mypy/typeshed/stdlib/distutils/version.pyi index 627d45067b5c..47da65ef87aa 100644 --- a/mypy/typeshed/stdlib/distutils/version.pyi +++ b/mypy/typeshed/stdlib/distutils/version.pyi @@ -1,36 +1,36 @@ -from _typeshed import Self from abc import abstractmethod from re import Pattern +from typing_extensions import Self class Version: def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self | str) -> bool: ... - def __le__(self: Self, other: Self | str) -> bool: ... - def __gt__(self: Self, other: Self | str) -> bool: ... - def __ge__(self: Self, other: Self | str) -> bool: ... + def __lt__(self, other: Self | str) -> bool: ... + def __le__(self, other: Self | str) -> bool: ... + def __gt__(self, other: Self | str) -> bool: ... + def __ge__(self, other: Self | str) -> bool: ... @abstractmethod - def __init__(self, vstring: str | None = ...) -> None: ... + def __init__(self, vstring: str | None = None) -> None: ... @abstractmethod - def parse(self: Self, vstring: str) -> Self: ... + def parse(self, vstring: str) -> Self: ... @abstractmethod def __str__(self) -> str: ... @abstractmethod - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... class StrictVersion(Version): version_re: Pattern[str] version: tuple[int, int, int] prerelease: tuple[str, int] | None - def __init__(self, vstring: str | None = ...) -> None: ... - def parse(self: Self, vstring: str) -> Self: ... 
+ def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... class LooseVersion(Version): component_re: Pattern[str] vstring: str version: tuple[str | int, ...] - def __init__(self, vstring: str | None = ...) -> None: ... - def parse(self: Self, vstring: str) -> Self: ... + def __init__(self, vstring: str | None = None) -> None: ... + def parse(self, vstring: str) -> Self: ... def __str__(self) -> str: ... # noqa: Y029 - def _cmp(self: Self, other: Self | str) -> bool: ... + def _cmp(self, other: Self | str) -> bool: ... diff --git a/mypy/typeshed/stdlib/doctest.pyi b/mypy/typeshed/stdlib/doctest.pyi index 382d9578ce80..88d066fdc23c 100644 --- a/mypy/typeshed/stdlib/doctest.pyi +++ b/mypy/typeshed/stdlib/doctest.pyi @@ -80,10 +80,10 @@ class Example: self, source: str, want: str, - exc_msg: str | None = ..., - lineno: int = ..., - indent: int = ..., - options: dict[int, bool] | None = ..., + exc_msg: str | None = None, + lineno: int = 0, + indent: int = 0, + options: dict[int, bool] | None = None, ) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -107,21 +107,21 @@ class DocTest: def __eq__(self, other: object) -> bool: ... class DocTestParser: - def parse(self, string: str, name: str = ...) -> list[str | Example]: ... + def parse(self, string: str, name: str = "") -> list[str | Example]: ... def get_doctest(self, string: str, globs: dict[str, Any], name: str, filename: str | None, lineno: int | None) -> DocTest: ... - def get_examples(self, string: str, name: str = ...) -> list[Example]: ... + def get_examples(self, string: str, name: str = "") -> list[Example]: ... class DocTestFinder: def __init__( - self, verbose: bool = ..., parser: DocTestParser = ..., recurse: bool = ..., exclude_empty: bool = ... + self, verbose: bool = False, parser: DocTestParser = ..., recurse: bool = True, exclude_empty: bool = True ) -> None: ... def find( self, obj: object, - name: str | None = ..., - module: None | bool | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - extraglobs: dict[str, Any] | None = ..., + name: str | None = None, + module: None | bool | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, ) -> list[DocTest]: ... _Out: TypeAlias = Callable[[str], object] @@ -133,15 +133,15 @@ class DocTestRunner: tries: int failures: int test: DocTest - def __init__(self, checker: OutputChecker | None = ..., verbose: bool | None = ..., optionflags: int = ...) -> None: ... + def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ... def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ... def report_success(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_failure(self, out: _Out, test: DocTest, example: Example, got: str) -> None: ... def report_unexpected_exception(self, out: _Out, test: DocTest, example: Example, exc_info: ExcInfo) -> None: ... def run( - self, test: DocTest, compileflags: int | None = ..., out: _Out | None = ..., clear_globs: bool = ... + self, test: DocTest, compileflags: int | None = None, out: _Out | None = None, clear_globs: bool = True ) -> TestResults: ... - def summarize(self, verbose: bool | None = ...) -> TestResults: ... 
+ def summarize(self, verbose: bool | None = None) -> TestResults: ... def merge(self, other: DocTestRunner) -> None: ... class OutputChecker: @@ -165,32 +165,37 @@ class DebugRunner(DocTestRunner): ... master: DocTestRunner | None def testmod( - m: types.ModuleType | None = ..., - name: str | None = ..., - globs: dict[str, Any] | None = ..., - verbose: bool | None = ..., - report: bool = ..., - optionflags: int = ..., - extraglobs: dict[str, Any] | None = ..., - raise_on_error: bool = ..., - exclude_empty: bool = ..., + m: types.ModuleType | None = None, + name: str | None = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, + exclude_empty: bool = False, ) -> TestResults: ... def testfile( filename: str, - module_relative: bool = ..., - name: str | None = ..., - package: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - verbose: bool | None = ..., - report: bool = ..., - optionflags: int = ..., - extraglobs: dict[str, Any] | None = ..., - raise_on_error: bool = ..., + module_relative: bool = True, + name: str | None = None, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + verbose: bool | None = None, + report: bool = True, + optionflags: int = 0, + extraglobs: dict[str, Any] | None = None, + raise_on_error: bool = False, parser: DocTestParser = ..., - encoding: str | None = ..., + encoding: str | None = None, ) -> TestResults: ... def run_docstring_examples( - f: object, globs: dict[str, Any], verbose: bool = ..., name: str = ..., compileflags: int | None = ..., optionflags: int = ... + f: object, + globs: dict[str, Any], + verbose: bool = False, + name: str = "NoName", + compileflags: int | None = None, + optionflags: int = 0, ) -> None: ... def set_unittest_reportflags(flags: int) -> int: ... @@ -198,52 +203,43 @@ class DocTestCase(unittest.TestCase): def __init__( self, test: DocTest, - optionflags: int = ..., - setUp: Callable[[DocTest], object] | None = ..., - tearDown: Callable[[DocTest], object] | None = ..., - checker: OutputChecker | None = ..., + optionflags: int = 0, + setUp: Callable[[DocTest], Any] | None = None, + tearDown: Callable[[DocTest], Any] | None = None, + checker: OutputChecker | None = None, ) -> None: ... - def setUp(self) -> None: ... - def tearDown(self) -> None: ... def runTest(self) -> None: ... def format_failure(self, err: str) -> str: ... - def debug(self) -> None: ... - def id(self) -> str: ... def __eq__(self, other: object) -> bool: ... - def shortDescription(self) -> str: ... class SkipDocTestCase(DocTestCase): def __init__(self, module: types.ModuleType) -> None: ... - def setUp(self) -> None: ... def test_skip(self) -> None: ... - def shortDescription(self) -> str: ... class _DocTestSuite(unittest.TestSuite): ... def DocTestSuite( - module: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., - extraglobs: dict[str, Any] | None = ..., - test_finder: DocTestFinder | None = ..., + module: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, + extraglobs: dict[str, Any] | None = None, + test_finder: DocTestFinder | None = None, **options: Any, ) -> _DocTestSuite: ... -class DocFileCase(DocTestCase): - def id(self) -> str: ... - def format_failure(self, err: str) -> str: ... +class DocFileCase(DocTestCase): ... 
def DocFileTest( path: str, - module_relative: bool = ..., - package: None | str | types.ModuleType = ..., - globs: dict[str, Any] | None = ..., + module_relative: bool = True, + package: None | str | types.ModuleType = None, + globs: dict[str, Any] | None = None, parser: DocTestParser = ..., - encoding: str | None = ..., + encoding: str | None = None, **options: Any, ) -> DocFileCase: ... def DocFileSuite(*paths: str, **kw: Any) -> _DocTestSuite: ... def script_from_examples(s: str) -> str: ... def testsource(module: None | str | types.ModuleType, name: str) -> str: ... -def debug_src(src: str, pm: bool = ..., globs: dict[str, Any] | None = ...) -> None: ... -def debug_script(src: str, pm: bool = ..., globs: dict[str, Any] | None = ...) -> None: ... -def debug(module: None | str | types.ModuleType, name: str, pm: bool = ...) -> None: ... +def debug_src(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug_script(src: str, pm: bool = False, globs: dict[str, Any] | None = None) -> None: ... +def debug(module: None | str | types.ModuleType, name: str, pm: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/email/__init__.pyi b/mypy/typeshed/stdlib/email/__init__.pyi index 4591b2c3340e..fca302f5f1a7 100644 --- a/mypy/typeshed/stdlib/email/__init__.pyi +++ b/mypy/typeshed/stdlib/email/__init__.pyi @@ -1,15 +1,15 @@ from collections.abc import Callable from email.message import Message from email.policy import Policy -from typing import IO, Union +from typing import IO from typing_extensions import TypeAlias # Definitions imported by multiple submodules in typeshed -_ParamType: TypeAlias = Union[str, tuple[str | None, str | None, str]] # noqa: Y047 -_ParamsType: TypeAlias = Union[str, None, tuple[str, str | None, str]] # noqa: Y047 +_ParamType: TypeAlias = str | tuple[str | None, str | None, str] # noqa: Y047 +_ParamsType: TypeAlias = str | None | tuple[str, str | None, str] # noqa: Y047 def message_from_string(s: str, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... -def message_from_bytes(s: bytes, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... +def message_from_bytes(s: bytes | bytearray, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_file(fp: IO[str], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... def message_from_binary_file(fp: IO[bytes], _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> Message: ... diff --git a/mypy/typeshed/stdlib/email/_header_value_parser.pyi b/mypy/typeshed/stdlib/email/_header_value_parser.pyi index 00d5c9882429..97008140ec5d 100644 --- a/mypy/typeshed/stdlib/email/_header_value_parser.pyi +++ b/mypy/typeshed/stdlib/email/_header_value_parser.pyi @@ -1,11 +1,10 @@ import sys -from _typeshed import Self from collections.abc import Iterable, Iterator from email.errors import HeaderParseError, MessageDefect from email.policy import Policy from re import Pattern from typing import Any -from typing_extensions import Final +from typing_extensions import Final, Self WSP: Final[set[str]] CFWS_LEADER: Final[set[str]] @@ -39,14 +38,10 @@ class TokenList(list[TokenList | Terminal]): @property def comments(self) -> list[str]: ... def fold(self, *, policy: Policy) -> str: ... - def pprint(self, indent: str = ...) -> None: ... - def ppstr(self, indent: str = ...) -> str: ... + def pprint(self, indent: str = "") -> None: ... + def ppstr(self, indent: str = "") -> str: ... 
-class WhiteSpaceTokenList(TokenList): - @property - def value(self) -> str: ... - @property - def comments(self) -> list[str]: ... +class WhiteSpaceTokenList(TokenList): ... class UnstructuredTokenList(TokenList): token_type: str @@ -84,16 +79,12 @@ class QuotedString(TokenList): class BareQuotedString(QuotedString): token_type: str - @property - def value(self) -> str: ... class Comment(WhiteSpaceTokenList): token_type: str def quote(self, value: Any) -> str: ... @property def content(self) -> str: ... - @property - def comments(self) -> list[str]: ... class AddressList(TokenList): token_type: str @@ -217,8 +208,6 @@ class AddrSpec(TokenList): @property def domain(self) -> str: ... @property - def value(self) -> str: ... - @property def addr_spec(self) -> str: ... class ObsLocalPart(TokenList): @@ -227,18 +216,13 @@ class ObsLocalPart(TokenList): class DisplayName(Phrase): token_type: str - ew_combine_allowed: bool @property def display_name(self) -> str: ... - @property - def value(self) -> str: ... class LocalPart(TokenList): token_type: str as_ew_allowed: bool @property - def value(self) -> str: ... - @property def local_part(self) -> str: ... class DomainLiteral(TokenList): @@ -333,7 +317,7 @@ class Terminal(str): syntactic_break: bool token_type: str defects: list[MessageDefect] - def __new__(cls: type[Self], value: str, token_type: str) -> Self: ... + def __new__(cls, value: str, token_type: str) -> Self: ... def pprint(self) -> None: ... @property def all_defects(self) -> list[MessageDefect]: ... @@ -352,10 +336,7 @@ class ValueTerminal(Terminal): def value(self) -> ValueTerminal: ... def startswith_fws(self) -> bool: ... -class EWWhiteSpaceTerminal(WhiteSpaceTerminal): - @property - def value(self) -> str: ... - +class EWWhiteSpaceTerminal(WhiteSpaceTerminal): ... class _InvalidEwError(HeaderParseError): ... DOT: Final[ValueTerminal] diff --git a/mypy/typeshed/stdlib/email/base64mime.pyi b/mypy/typeshed/stdlib/email/base64mime.pyi index e55658046f55..563cd7f669a2 100644 --- a/mypy/typeshed/stdlib/email/base64mime.pyi +++ b/mypy/typeshed/stdlib/email/base64mime.pyi @@ -1,9 +1,13 @@ __all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] -def header_length(bytearray: str | bytes) -> int: ... -def header_encode(header_bytes: str | bytes, charset: str = ...) -> str: ... -def body_encode(s: bytes, maxlinelen: int = ..., eol: str = ...) -> str: ... -def decode(string: str | bytes) -> bytes: ... +from _typeshed import ReadableBuffer + +def header_length(bytearray: str | bytes | bytearray) -> int: ... +def header_encode(header_bytes: str | ReadableBuffer, charset: str = "iso-8859-1") -> str: ... + +# First argument should be a buffer that supports slicing and len(). +def body_encode(s: bytes | bytearray, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(string: str | ReadableBuffer) -> bytes: ... body_decode = decode decodestring = decode diff --git a/mypy/typeshed/stdlib/email/charset.pyi b/mypy/typeshed/stdlib/email/charset.pyi index 236908537f83..24b8fd768b7b 100644 --- a/mypy/typeshed/stdlib/email/charset.pyi +++ b/mypy/typeshed/stdlib/email/charset.pyi @@ -13,7 +13,7 @@ class Charset: output_charset: str | None input_codec: str | None output_codec: str | None - def __init__(self, input_charset: str = ...) -> None: ... + def __init__(self, input_charset: str = "us-ascii") -> None: ... def get_body_encoding(self) -> str: ... def get_output_charset(self) -> str | None: ... def header_encode(self, string: str) -> str: ... 
@@ -23,7 +23,7 @@ class Charset: def __ne__(self, __other: object) -> bool: ... def add_charset( - charset: str, header_enc: int | None = ..., body_enc: int | None = ..., output_charset: str | None = ... + charset: str, header_enc: int | None = None, body_enc: int | None = None, output_charset: str | None = None ) -> None: ... def add_alias(alias: str, canonical: str) -> None: ... def add_codec(charset: str, codecname: str) -> None: ... diff --git a/mypy/typeshed/stdlib/email/contentmanager.pyi b/mypy/typeshed/stdlib/email/contentmanager.pyi index 3ac665eaa7bf..3214f1a4781d 100644 --- a/mypy/typeshed/stdlib/email/contentmanager.pyi +++ b/mypy/typeshed/stdlib/email/contentmanager.pyi @@ -3,7 +3,6 @@ from email.message import Message from typing import Any class ContentManager: - def __init__(self) -> None: ... def get_content(self, msg: Message, *args: Any, **kw: Any) -> Any: ... def set_content(self, msg: Message, obj: Any, *args: Any, **kw: Any) -> Any: ... def add_get_handler(self, key: str, handler: Callable[..., Any]) -> None: ... diff --git a/mypy/typeshed/stdlib/email/errors.pyi b/mypy/typeshed/stdlib/email/errors.pyi index 656cbd374ac4..c54f1560c9ae 100644 --- a/mypy/typeshed/stdlib/email/errors.pyi +++ b/mypy/typeshed/stdlib/email/errors.pyi @@ -8,7 +8,7 @@ class MultipartConversionError(MessageError, TypeError): ... class CharsetError(MessageError): ... class MessageDefect(ValueError): - def __init__(self, line: str | None = ...) -> None: ... + def __init__(self, line: str | None = None) -> None: ... class NoBoundaryInMultipartDefect(MessageDefect): ... class StartBoundaryNotFoundDefect(MessageDefect): ... diff --git a/mypy/typeshed/stdlib/email/feedparser.pyi b/mypy/typeshed/stdlib/email/feedparser.pyi index c535c353daad..4b7f73b9c015 100644 --- a/mypy/typeshed/stdlib/email/feedparser.pyi +++ b/mypy/typeshed/stdlib/email/feedparser.pyi @@ -9,7 +9,7 @@ _MessageT = TypeVar("_MessageT", bound=Message) class FeedParser(Generic[_MessageT]): @overload - def __init__(self: FeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... + def __init__(self: FeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... def feed(self, data: str) -> None: ... @@ -17,8 +17,8 @@ class FeedParser(Generic[_MessageT]): class BytesFeedParser(Generic[_MessageT]): @overload - def __init__(self: BytesFeedParser[Message], _factory: None = ..., *, policy: Policy = ...) -> None: ... + def __init__(self: BytesFeedParser[Message], _factory: None = None, *, policy: Policy = ...) -> None: ... @overload def __init__(self, _factory: Callable[[], _MessageT], *, policy: Policy = ...) -> None: ... - def feed(self, data: bytes) -> None: ... + def feed(self, data: bytes | bytearray) -> None: ... def close(self) -> _MessageT: ... diff --git a/mypy/typeshed/stdlib/email/generator.pyi b/mypy/typeshed/stdlib/email/generator.pyi index 5a6b6374dd4b..8362dd9c4ff6 100644 --- a/mypy/typeshed/stdlib/email/generator.pyi +++ b/mypy/typeshed/stdlib/email/generator.pyi @@ -10,12 +10,12 @@ class Generator: def __init__( self, outfp: SupportsWrite[str], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... - def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... 
+ def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... class BytesGenerator: def clone(self, fp: SupportsWrite[bytes]) -> BytesGenerator: ... @@ -23,20 +23,20 @@ class BytesGenerator: def __init__( self, outfp: SupportsWrite[bytes], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... - def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... + def flatten(self, msg: Message, unixfrom: bool = False, linesep: str | None = None) -> None: ... class DecodedGenerator(Generator): def __init__( self, outfp: SupportsWrite[str], - mangle_from_: bool | None = ..., - maxheaderlen: int | None = ..., - fmt: str | None = ..., + mangle_from_: bool | None = None, + maxheaderlen: int | None = None, + fmt: str | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/header.pyi b/mypy/typeshed/stdlib/email/header.pyi index 9248759168a9..c6f0c6fbf6fc 100644 --- a/mypy/typeshed/stdlib/email/header.pyi +++ b/mypy/typeshed/stdlib/email/header.pyi @@ -1,3 +1,4 @@ +from collections.abc import Iterable from email.charset import Charset from typing import Any @@ -6,15 +7,15 @@ __all__ = ["Header", "decode_header", "make_header"] class Header: def __init__( self, - s: bytes | str | None = ..., - charset: Charset | str | None = ..., - maxlinelen: int | None = ..., - header_name: str | None = ..., - continuation_ws: str = ..., - errors: str = ..., + s: bytes | bytearray | str | None = None, + charset: Charset | str | None = None, + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", + errors: str = "strict", ) -> None: ... - def append(self, s: bytes | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... - def encode(self, splitchars: str = ..., maxlinelen: int | None = ..., linesep: str = ...) -> str: ... + def append(self, s: bytes | bytearray | str, charset: Charset | str | None = None, errors: str = "strict") -> None: ... + def encode(self, splitchars: str = ";, \t", maxlinelen: int | None = None, linesep: str = "\n") -> str: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... @@ -23,8 +24,8 @@ class Header: # contains at least one encoded part. def decode_header(header: Header | str) -> list[tuple[Any, Any | None]]: ... def make_header( - decoded_seq: list[tuple[bytes, str | None]], - maxlinelen: int | None = ..., - header_name: str | None = ..., - continuation_ws: str = ..., + decoded_seq: Iterable[tuple[bytes | bytearray | str, str | None]], + maxlinelen: int | None = None, + header_name: str | None = None, + continuation_ws: str = " ", ) -> Header: ... 
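Not part of the patch — an illustrative aside: the `email.header` hunk above widens `make_header()` to accept any iterable of `(bytes | bytearray | str, charset)` pairs, which is exactly the shape `decode_header()` produces. A small sketch of that round trip; the sample encoded word is invented.

```python
# Illustrative sketch only -- not patch content.
from email.header import Header, decode_header, make_header

raw = "=?utf-8?q?caf=C3=A9?="        # an RFC 2047 encoded word (example value)
parts = decode_header(raw)           # [(b'caf\xc3\xa9', 'utf-8')]
header: Header = make_header(parts)  # accepted by the widened Iterable signature
print(str(header))                   # café
```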
diff --git a/mypy/typeshed/stdlib/email/headerregistry.pyi b/mypy/typeshed/stdlib/email/headerregistry.pyi index b2b63c4ac72c..e158e89818f7 100644 --- a/mypy/typeshed/stdlib/email/headerregistry.pyi +++ b/mypy/typeshed/stdlib/email/headerregistry.pyi @@ -1,6 +1,5 @@ import sys import types -from _typeshed import Self from collections.abc import Iterable, Mapping from datetime import datetime as _datetime from email._header_value_parser import ( @@ -15,7 +14,7 @@ from email._header_value_parser import ( from email.errors import MessageDefect from email.policy import Policy from typing import Any, ClassVar, Protocol -from typing_extensions import Literal +from typing_extensions import Literal, Self class BaseHeader(str): # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) @@ -24,7 +23,7 @@ class BaseHeader(str): def name(self) -> str: ... @property def defects(self) -> tuple[MessageDefect, ...]: ... - def __new__(cls: type[Self], name: str, value: Any) -> Self: ... + def __new__(cls, name: str, value: Any) -> Self: ... def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... def fold(self, *, policy: Policy) -> str: ... @@ -153,7 +152,7 @@ class HeaderRegistry: base_class: type[BaseHeader] default_class: type[_HeaderParser] def __init__( - self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = ... + self, base_class: type[BaseHeader] = ..., default_class: type[_HeaderParser] = ..., use_default_map: bool = True ) -> None: ... def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... def __getitem__(self, name: str) -> type[BaseHeader]: ... @@ -169,7 +168,7 @@ class Address: @property def addr_spec(self) -> str: ... def __init__( - self, display_name: str = ..., username: str | None = ..., domain: str | None = ..., addr_spec: str | None = ... + self, display_name: str = "", username: str | None = "", domain: str | None = "", addr_spec: str | None = None ) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -178,5 +177,5 @@ class Group: def display_name(self) -> str | None: ... @property def addresses(self) -> tuple[Address, ...]: ... - def __init__(self, display_name: str | None = ..., addresses: Iterable[Address] | None = ...) -> None: ... + def __init__(self, display_name: str | None = None, addresses: Iterable[Address] | None = None) -> None: ... def __eq__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/email/iterators.pyi b/mypy/typeshed/stdlib/email/iterators.pyi index 29068819ac15..d964d6843833 100644 --- a/mypy/typeshed/stdlib/email/iterators.pyi +++ b/mypy/typeshed/stdlib/email/iterators.pyi @@ -4,9 +4,9 @@ from email.message import Message __all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] -def body_line_iterator(msg: Message, decode: bool = ...) -> Iterator[str]: ... -def typed_subpart_iterator(msg: Message, maintype: str = ..., subtype: str | None = ...) -> Iterator[str]: ... +def body_line_iterator(msg: Message, decode: bool = False) -> Iterator[str]: ... +def typed_subpart_iterator(msg: Message, maintype: str = "text", subtype: str | None = None) -> Iterator[str]: ... def walk(self: Message) -> Iterator[Message]: ... # We include the seemingly private function because it is documented in the stdlib documentation. -def _structure(msg: Message, fp: SupportsWrite[str] | None = ..., level: int = ..., include_default: bool = ...) -> None: ... 
+def _structure(msg: Message, fp: SupportsWrite[str] | None = None, level: int = 0, include_default: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 4e8f600f7ffd..14e018073103 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -1,18 +1,17 @@ -from _typeshed import Self from collections.abc import Generator, Iterator, Sequence from email import _ParamsType, _ParamType from email.charset import Charset from email.contentmanager import ContentManager from email.errors import MessageDefect from email.policy import Policy -from typing import Any, TypeVar -from typing_extensions import TypeAlias +from typing import Any, TypeVar, overload +from typing_extensions import Self, TypeAlias __all__ = ["Message", "EmailMessage"] _T = TypeVar("_T") -_PayloadType: TypeAlias = list[Message] | str | bytes +_PayloadType: TypeAlias = list[Message] | str | bytes | bytearray _CharsetType: TypeAlias = Charset | str | None _HeaderType: TypeAlias = Any @@ -25,8 +24,8 @@ class Message: def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... def attach(self, payload: Message) -> None: ... - def get_payload(self, i: int | None = ..., decode: bool = ...) -> Any: ... # returns _PayloadType | None - def set_payload(self, payload: _PayloadType, charset: _CharsetType = ...) -> None: ... + def get_payload(self, i: int | None = None, decode: bool = False) -> Any: ... # returns _PayloadType | None + def set_payload(self, payload: _PayloadType, charset: _CharsetType = None) -> None: ... def set_charset(self, charset: _CharsetType) -> None: ... def get_charset(self) -> _CharsetType: ... def __len__(self) -> int: ... @@ -38,8 +37,14 @@ class Message: def keys(self) -> list[str]: ... def values(self) -> list[_HeaderType]: ... def items(self) -> list[tuple[str, _HeaderType]]: ... - def get(self, name: str, failobj: _T = ...) -> _HeaderType | _T: ... - def get_all(self, name: str, failobj: _T = ...) -> list[_HeaderType] | _T: ... + @overload + def get(self, name: str, failobj: None = None) -> _HeaderType | None: ... + @overload + def get(self, name: str, failobj: _T) -> _HeaderType | _T: ... + @overload + def get_all(self, name: str, failobj: None = None) -> list[_HeaderType] | None: ... + @overload + def get_all(self, name: str, failobj: _T) -> list[_HeaderType] | _T: ... def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... def replace_header(self, _name: str, _value: _HeaderType) -> None: ... def get_content_type(self) -> str: ... @@ -47,51 +52,73 @@ class Message: def get_content_subtype(self) -> str: ... def get_default_type(self) -> str: ... def set_default_type(self, ctype: str) -> None: ... - def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> list[tuple[str, str]] | _T: ... - def get_param(self, param: str, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> _T | _ParamType: ... - def del_param(self, param: str, header: str = ..., requote: bool = ...) -> None: ... - def set_type(self, type: str, header: str = ..., requote: bool = ...) -> None: ... - def get_filename(self, failobj: _T = ...) -> _T | str: ... - def get_boundary(self, failobj: _T = ...) -> _T | str: ... + @overload + def get_params( + self, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> list[tuple[str, str]] | None: ... 
+ @overload + def get_params(self, failobj: _T, header: str = "content-type", unquote: bool = True) -> list[tuple[str, str]] | _T: ... + @overload + def get_param( + self, param: str, failobj: None = None, header: str = "content-type", unquote: bool = True + ) -> _ParamType | None: ... + @overload + def get_param(self, param: str, failobj: _T, header: str = "content-type", unquote: bool = True) -> _ParamType | _T: ... + def del_param(self, param: str, header: str = "content-type", requote: bool = True) -> None: ... + def set_type(self, type: str, header: str = "Content-Type", requote: bool = True) -> None: ... + @overload + def get_filename(self, failobj: None = None) -> str | None: ... + @overload + def get_filename(self, failobj: _T) -> str | _T: ... + @overload + def get_boundary(self, failobj: None = None) -> str | None: ... + @overload + def get_boundary(self, failobj: _T) -> str | _T: ... def set_boundary(self, boundary: str) -> None: ... - def get_content_charset(self, failobj: _T = ...) -> _T | str: ... - def get_charsets(self, failobj: _T = ...) -> _T | list[str]: ... - def walk(self: Self) -> Generator[Self, None, None]: ... + @overload + def get_content_charset(self) -> str | None: ... + @overload + def get_content_charset(self, failobj: _T) -> str | _T: ... + @overload + def get_charsets(self, failobj: None = None) -> list[str] | None: ... + @overload + def get_charsets(self, failobj: _T) -> list[str] | _T: ... + def walk(self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... - def as_string(self, unixfrom: bool = ..., maxheaderlen: int = ..., policy: Policy | None = ...) -> str: ... - def as_bytes(self, unixfrom: bool = ..., policy: Policy | None = ...) -> bytes: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: ... + def as_bytes(self, unixfrom: bool = False, policy: Policy | None = None) -> bytes: ... def __bytes__(self) -> bytes: ... def set_param( self, param: str, value: str, - header: str = ..., - requote: bool = ..., - charset: str | None = ..., - language: str = ..., - replace: bool = ..., + header: str = "Content-Type", + requote: bool = True, + charset: str | None = None, + language: str = "", + replace: bool = False, ) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... # The following two methods are undocumented, but a source code comment states that they are public API - def set_raw(self, name: str, value: str) -> None: ... - def raw_items(self) -> Iterator[tuple[str, str]]: ... + def set_raw(self, name: str, value: _HeaderType) -> None: ... + def raw_items(self) -> Iterator[tuple[str, _HeaderType]]: ... class MIMEPart(Message): - def __init__(self, policy: Policy | None = ...) -> None: ... + def __init__(self, policy: Policy | None = None) -> None: ... def get_body(self, preferencelist: Sequence[str] = ...) -> Message | None: ... def iter_attachments(self) -> Iterator[Message]: ... def iter_parts(self) -> Iterator[Message]: ... - def get_content(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> Any: ... - def set_content(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... - def make_related(self, boundary: str | None = ...) -> None: ... - def make_alternative(self, boundary: str | None = ...) -> None: ... - def make_mixed(self, boundary: str | None = ...) -> None: ... + def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... 
+ def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ... + def make_related(self, boundary: str | None = None) -> None: ... + def make_alternative(self, boundary: str | None = None) -> None: ... + def make_mixed(self, boundary: str | None = None) -> None: ... def add_related(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def add_alternative(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def add_attachment(self, *args: Any, content_manager: ContentManager | None = ..., **kw: Any) -> None: ... def clear(self) -> None: ... def clear_content(self) -> None: ... - def as_string(self, unixfrom: bool = ..., maxheaderlen: int | None = ..., policy: Policy | None = ...) -> str: ... + def as_string(self, unixfrom: bool = False, maxheaderlen: int | None = None, policy: Policy | None = None) -> str: ... def is_attachment(self) -> bool: ... class EmailMessage(MIMEPart): ... diff --git a/mypy/typeshed/stdlib/email/mime/application.pyi b/mypy/typeshed/stdlib/email/mime/application.pyi index dfff85265ade..a7ab9dc75ce2 100644 --- a/mypy/typeshed/stdlib/email/mime/application.pyi +++ b/mypy/typeshed/stdlib/email/mime/application.pyi @@ -8,10 +8,10 @@ __all__ = ["MIMEApplication"] class MIMEApplication(MIMENonMultipart): def __init__( self, - _data: str | bytes, - _subtype: str = ..., + _data: str | bytes | bytearray, + _subtype: str = "octet-stream", _encoder: Callable[[MIMEApplication], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/audio.pyi b/mypy/typeshed/stdlib/email/mime/audio.pyi index b355d55070ad..090dfb960db6 100644 --- a/mypy/typeshed/stdlib/email/mime/audio.pyi +++ b/mypy/typeshed/stdlib/email/mime/audio.pyi @@ -8,10 +8,10 @@ __all__ = ["MIMEAudio"] class MIMEAudio(MIMENonMultipart): def __init__( self, - _audiodata: str | bytes, - _subtype: str | None = ..., + _audiodata: str | bytes | bytearray, + _subtype: str | None = None, _encoder: Callable[[MIMEAudio], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/base.pyi b/mypy/typeshed/stdlib/email/mime/base.pyi index c8f2fe6db79d..b733709f1b5a 100644 --- a/mypy/typeshed/stdlib/email/mime/base.pyi +++ b/mypy/typeshed/stdlib/email/mime/base.pyi @@ -5,4 +5,4 @@ from email.policy import Policy __all__ = ["MIMEBase"] class MIMEBase(email.message.Message): - def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = ..., **_params: _ParamsType) -> None: ... + def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = None, **_params: _ParamsType) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/image.pyi b/mypy/typeshed/stdlib/email/mime/image.pyi index f575103de2d6..b47afa6ce592 100644 --- a/mypy/typeshed/stdlib/email/mime/image.pyi +++ b/mypy/typeshed/stdlib/email/mime/image.pyi @@ -8,10 +8,10 @@ __all__ = ["MIMEImage"] class MIMEImage(MIMENonMultipart): def __init__( self, - _imagedata: str | bytes, - _subtype: str | None = ..., + _imagedata: str | bytes | bytearray, + _subtype: str | None = None, _encoder: Callable[[MIMEImage], object] = ..., *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... 
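Not part of the patch — an illustrative aside: the `email.message` hunks above add `@overload`s so that `Message.get()`, `get_filename()` and friends distinguish the no-`failobj` case, which may return `None`, from the explicit-fallback case. A short sketch of the runtime behaviour those overloads describe; the header names are invented.

```python
# Illustrative sketch only -- not patch content.
from email.message import Message

msg = Message()
msg["Subject"] = "hello"

subject = msg.get("Subject")             # header value, or None if the header is absent
fallback = msg.get("X-Missing", "n/a")   # header value, or the given "n/a" fallback
filename = msg.get_filename()            # str | None under the new overloads
print(subject, fallback, filename)       # hello n/a None
```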
diff --git a/mypy/typeshed/stdlib/email/mime/message.pyi b/mypy/typeshed/stdlib/email/mime/message.pyi index 9e7cd04b6e77..23cf58619ad9 100644 --- a/mypy/typeshed/stdlib/email/mime/message.pyi +++ b/mypy/typeshed/stdlib/email/mime/message.pyi @@ -5,4 +5,4 @@ from email.policy import Policy __all__ = ["MIMEMessage"] class MIMEMessage(MIMENonMultipart): - def __init__(self, _msg: Message, _subtype: str = ..., *, policy: Policy | None = ...) -> None: ... + def __init__(self, _msg: Message, _subtype: str = "rfc822", *, policy: Policy | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/multipart.pyi b/mypy/typeshed/stdlib/email/mime/multipart.pyi index 6cd480ccf0a4..6163810ed94a 100644 --- a/mypy/typeshed/stdlib/email/mime/multipart.pyi +++ b/mypy/typeshed/stdlib/email/mime/multipart.pyi @@ -9,10 +9,10 @@ __all__ = ["MIMEMultipart"] class MIMEMultipart(MIMEBase): def __init__( self, - _subtype: str = ..., - boundary: str | None = ..., - _subparts: Sequence[Message] | None = ..., + _subtype: str = "mixed", + boundary: str | None = None, + _subparts: Sequence[Message] | None = None, *, - policy: Policy | None = ..., + policy: Policy | None = None, **_params: _ParamsType, ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/mime/text.pyi b/mypy/typeshed/stdlib/email/mime/text.pyi index 9672c3b717b2..74d5ef4c5cae 100644 --- a/mypy/typeshed/stdlib/email/mime/text.pyi +++ b/mypy/typeshed/stdlib/email/mime/text.pyi @@ -4,4 +4,6 @@ from email.policy import Policy __all__ = ["MIMEText"] class MIMEText(MIMENonMultipart): - def __init__(self, _text: str, _subtype: str = ..., _charset: str | None = ..., *, policy: Policy | None = ...) -> None: ... + def __init__( + self, _text: str, _subtype: str = "plain", _charset: str | None = None, *, policy: Policy | None = None + ) -> None: ... diff --git a/mypy/typeshed/stdlib/email/parser.pyi b/mypy/typeshed/stdlib/email/parser.pyi index dcd346c1b46d..28b6aca856ca 100644 --- a/mypy/typeshed/stdlib/email/parser.pyi +++ b/mypy/typeshed/stdlib/email/parser.pyi @@ -1,27 +1,26 @@ +from _typeshed import SupportsRead from collections.abc import Callable from email.feedparser import BytesFeedParser as BytesFeedParser, FeedParser as FeedParser from email.message import Message from email.policy import Policy -from typing import BinaryIO, TextIO +from typing import IO __all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] class Parser: - def __init__(self, _class: Callable[[], Message] | None = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... - def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... + def __init__(self, _class: Callable[[], Message] | None = None, *, policy: Policy = ...) -> None: ... + def parse(self, fp: SupportsRead[str], headersonly: bool = False) -> Message: ... + def parsestr(self, text: str, headersonly: bool = False) -> Message: ... class HeaderParser(Parser): - def __init__(self, _class: Callable[[], Message] | None = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: TextIO, headersonly: bool = ...) -> Message: ... - def parsestr(self, text: str, headersonly: bool = ...) -> Message: ... - -class BytesHeaderParser(BytesParser): - def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... - def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... 
+ def parse(self, fp: SupportsRead[str], headersonly: bool = True) -> Message: ... + def parsestr(self, text: str, headersonly: bool = True) -> Message: ... class BytesParser: def __init__(self, _class: Callable[[], Message] = ..., *, policy: Policy = ...) -> None: ... - def parse(self, fp: BinaryIO, headersonly: bool = ...) -> Message: ... - def parsebytes(self, text: bytes, headersonly: bool = ...) -> Message: ... + def parse(self, fp: IO[bytes], headersonly: bool = False) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = False) -> Message: ... + +class BytesHeaderParser(BytesParser): + def parse(self, fp: IO[bytes], headersonly: bool = True) -> Message: ... + def parsebytes(self, text: bytes | bytearray, headersonly: bool = True) -> Message: ... diff --git a/mypy/typeshed/stdlib/email/quoprimime.pyi b/mypy/typeshed/stdlib/email/quoprimime.pyi index c5d324d17e13..87d08eecc70c 100644 --- a/mypy/typeshed/stdlib/email/quoprimime.pyi +++ b/mypy/typeshed/stdlib/email/quoprimime.pyi @@ -1,3 +1,5 @@ +from collections.abc import Iterable + __all__ = [ "body_decode", "body_encode", @@ -13,13 +15,13 @@ __all__ = [ def header_check(octet: int) -> bool: ... def body_check(octet: int) -> bool: ... -def header_length(bytearray: bytes) -> int: ... -def body_length(bytearray: bytes) -> int: ... -def unquote(s: str | bytes) -> str: ... -def quote(c: str | bytes) -> str: ... -def header_encode(header_bytes: bytes, charset: str = ...) -> str: ... -def body_encode(body: str, maxlinelen: int = ..., eol: str = ...) -> str: ... -def decode(encoded: str, eol: str = ...) -> str: ... +def header_length(bytearray: Iterable[int]) -> int: ... +def body_length(bytearray: Iterable[int]) -> int: ... +def unquote(s: str | bytes | bytearray) -> str: ... +def quote(c: str | bytes | bytearray) -> str: ... +def header_encode(header_bytes: bytes | bytearray, charset: str = "iso-8859-1") -> str: ... +def body_encode(body: str, maxlinelen: int = 76, eol: str = "\n") -> str: ... +def decode(encoded: str, eol: str = "\n") -> str: ... def header_decode(s: str) -> str: ... body_decode = decode diff --git a/mypy/typeshed/stdlib/email/utils.pyi b/mypy/typeshed/stdlib/email/utils.pyi index 480c5f79549d..090ddf9e31bc 100644 --- a/mypy/typeshed/stdlib/email/utils.pyi +++ b/mypy/typeshed/stdlib/email/utils.pyi @@ -28,7 +28,7 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None def quote(str: str) -> str: ... def unquote(str: str) -> str: ... def parseaddr(addr: str | None) -> tuple[str, str]: ... -def formataddr(pair: tuple[str | None, str], charset: str | Charset = ...) -> str: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ... def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: ... @overload def parsedate(data: None) -> None: ... @@ -49,11 +49,11 @@ else: def parsedate_to_datetime(data: str) -> datetime.datetime: ... def mktime_tz(data: _PDTZ) -> int: ... -def formatdate(timeval: float | None = ..., localtime: bool = ..., usegmt: bool = ...) -> str: ... -def format_datetime(dt: datetime.datetime, usegmt: bool = ...) -> str: ... -def localtime(dt: datetime.datetime | None = ..., isdst: int = ...) -> datetime.datetime: ... -def make_msgid(idstring: str | None = ..., domain: str | None = ...) -> str: ... +def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ... +def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ... 
+def localtime(dt: datetime.datetime | None = None, isdst: int = -1) -> datetime.datetime: ... +def make_msgid(idstring: str | None = None, domain: str | None = None) -> str: ... def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... -def encode_rfc2231(s: str, charset: str | None = ..., language: str | None = ...) -> str: ... -def collapse_rfc2231_value(value: _ParamType, errors: str = ..., fallback_charset: str = ...) -> str: ... +def encode_rfc2231(s: str, charset: str | None = None, language: str | None = None) -> str: ... +def collapse_rfc2231_value(value: _ParamType, errors: str = "replace", fallback_charset: str = "us-ascii") -> str: ... def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/mypy/typeshed/stdlib/encodings/__init__.pyi b/mypy/typeshed/stdlib/encodings/__init__.pyi index d86466762268..2e83f0f65a71 100644 --- a/mypy/typeshed/stdlib/encodings/__init__.pyi +++ b/mypy/typeshed/stdlib/encodings/__init__.pyi @@ -1,5 +1,5 @@ +from _typeshed import Incomplete from codecs import CodecInfo -from typing import Any class CodecRegistryError(LookupError, SystemError): ... @@ -7,4 +7,4 @@ def normalize_encoding(encoding: str | bytes) -> str: ... def search_function(encoding: str) -> CodecInfo | None: ... # Needed for submodules -def __getattr__(name: str) -> Any: ... # incomplete +def __getattr__(name: str) -> Incomplete: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8.pyi b/mypy/typeshed/stdlib/encodings/utf_8.pyi index 568fa6013373..0de51026f9f5 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8.pyi @@ -1,20 +1,21 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): - def encode(self, input: str, final: bool = ...) -> bytes: ... + def encode(self, input: str, final: bool = False) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): @staticmethod - def _buffer_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def _buffer_decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): @staticmethod - def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): @staticmethod - def decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + def decode(__data: ReadableBuffer, __errors: str | None = None, __final: bool = False) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... -def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def encode(__str: str, __errors: str | None = None) -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi index bf52e8a6f3d3..150fe22f8f6e 100644 --- a/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi +++ b/mypy/typeshed/stdlib/encodings/utf_8_sig.pyi @@ -1,27 +1,22 @@ import codecs +from _typeshed import ReadableBuffer class IncrementalEncoder(codecs.IncrementalEncoder): - def __init__(self, errors: str = ...) -> None: ... - def encode(self, input: str, final: bool = ...) -> bytes: ... 
- def reset(self) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... + def encode(self, input: str, final: bool = False) -> bytes: ... def getstate(self) -> int: ... # type: ignore[override] def setstate(self, state: int) -> None: ... # type: ignore[override] class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def __init__(self, errors: str = ...) -> None: ... - def _buffer_decode(self, input: bytes, errors: str | None, final: bool) -> tuple[str, int]: ... - def reset(self) -> None: ... - def getstate(self) -> tuple[bytes, int]: ... - def setstate(self, state: tuple[bytes, int]) -> None: ... + def __init__(self, errors: str = "strict") -> None: ... + def _buffer_decode(self, input: ReadableBuffer, errors: str | None, final: bool) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): - def reset(self) -> None: ... - def encode(self, input: str, errors: str | None = ...) -> tuple[bytes, int]: ... + def encode(self, input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... class StreamReader(codecs.StreamReader): - def reset(self) -> None: ... - def decode(self, input: bytes, errors: str | None = ...) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... -def encode(input: str, errors: str | None = ...) -> tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... +def encode(input: str, errors: str | None = "strict") -> tuple[bytes, int]: ... +def decode(input: ReadableBuffer, errors: str | None = "strict") -> tuple[str, int]: ... diff --git a/mypy/typeshed/stdlib/ensurepip/__init__.pyi b/mypy/typeshed/stdlib/ensurepip/__init__.pyi index e2686b8d5437..332fb1845917 100644 --- a/mypy/typeshed/stdlib/ensurepip/__init__.pyi +++ b/mypy/typeshed/stdlib/ensurepip/__init__.pyi @@ -3,10 +3,10 @@ __all__ = ["version", "bootstrap"] def version() -> str: ... def bootstrap( *, - root: str | None = ..., - upgrade: bool = ..., - user: bool = ..., - altinstall: bool = ..., - default_pip: bool = ..., - verbosity: int = ..., + root: str | None = None, + upgrade: bool = False, + user: bool = False, + altinstall: bool = False, + default_pip: bool = False, + verbosity: int = 0, ) -> None: ... diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index a14744f1ba8d..b46fe429cacb 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -1,11 +1,12 @@ +import _typeshed import sys import types -from _typeshed import Self, SupportsKeysAndGetItem +from _typeshed import SupportsKeysAndGetItem, Unused from abc import ABCMeta from builtins import property as _builtins_property from collections.abc import Iterable, Iterator, Mapping from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] @@ -80,19 +81,21 @@ class _EnumDict(dict[str, Any]): class EnumMeta(ABCMeta): if sys.version_info >= (3, 11): def __new__( - metacls: type[Self], + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, *, - boundary: FlagBoundary | None = ..., - _simple: bool = ..., + boundary: FlagBoundary | None = None, + _simple: bool = False, **kwds: Any, - ) -> Self: ... + ) -> _typeshed.Self: ... 
elif sys.version_info >= (3, 9): - def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any) -> Self: ... + def __new__( + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any + ) -> _typeshed.Self: ... else: - def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> Self: ... + def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ... if sys.version_info >= (3, 9): @classmethod @@ -112,7 +115,7 @@ class EnumMeta(ABCMeta): def __dir__(self) -> list[str]: ... # Simple value lookup @overload # type: ignore[override] - def __call__(cls: type[_EnumMemberT], value: Any, names: None = ...) -> _EnumMemberT: ... + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... # Functional Enum API if sys.version_info >= (3, 11): @overload @@ -121,11 +124,11 @@ class EnumMeta(ABCMeta): value: str, names: _EnumNames, *, - module: str | None = ..., - qualname: str | None = ..., - type: type | None = ..., - start: int = ..., - boundary: FlagBoundary | None = ..., + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + boundary: FlagBoundary | None = None, ) -> type[Enum]: ... else: @overload @@ -134,10 +137,10 @@ class EnumMeta(ABCMeta): value: str, names: _EnumNames, *, - module: str | None = ..., - qualname: str | None = ..., - type: type | None = ..., - start: int = ..., + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, ) -> type[Enum]: ... _member_names_: list[str] # undocumented _member_map_: dict[str, Enum] # undocumented @@ -174,10 +177,10 @@ class Enum(metaclass=EnumMeta): # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` # (see #7752, #2539, mypy/#5788), # and in practice using `object` here has the same effect as using `Any`. - def __new__(cls: type[Self], value: object) -> Self: ... + def __new__(cls, value: object) -> Self: ... def __dir__(self) -> list[str]: ... def __format__(self, format_spec: str) -> str: ... - def __reduce_ex__(self, proto: object) -> tuple[Any, ...]: ... + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... if sys.version_info >= (3, 11): class ReprEnum(Enum): ... @@ -191,7 +194,7 @@ class IntEnum(int, _IntEnumBase): _value_: int @_magic_enum_attr def value(self) -> int: ... - def __new__(cls: type[Self], value: int) -> Self: ... + def __new__(cls, value: int) -> Self: ... def unique(enumeration: _EnumerationT) -> _EnumerationT: ... @@ -202,7 +205,7 @@ class auto(IntFlag): _value_: Any @_magic_enum_attr def value(self) -> Any: ... - def __new__(cls: type[Self]) -> Self: ... + def __new__(cls) -> Self: ... class Flag(Enum): _name_: str | None # type: ignore[assignment] @@ -211,14 +214,14 @@ class Flag(Enum): def name(self) -> str | None: ... # type: ignore[override] @_magic_enum_attr def value(self) -> int: ... - def __contains__(self: Self, other: Self) -> bool: ... + def __contains__(self, other: Self) -> bool: ... def __bool__(self) -> bool: ... - def __or__(self: Self, other: Self) -> Self: ... - def __and__(self: Self, other: Self) -> Self: ... - def __xor__(self: Self, other: Self) -> Self: ... - def __invert__(self: Self) -> Self: ... + def __or__(self, other: Self) -> Self: ... + def __and__(self, other: Self) -> Self: ... + def __xor__(self, other: Self) -> Self: ... 
+ def __invert__(self) -> Self: ... if sys.version_info >= (3, 11): - def __iter__(self: Self) -> Iterator[Self]: ... + def __iter__(self) -> Iterator[Self]: ... def __len__(self) -> int: ... __ror__ = __or__ __rand__ = __and__ @@ -226,28 +229,28 @@ class Flag(Enum): if sys.version_info >= (3, 11): # The body of the class is the same, but the base classes are different. - class IntFlag(int, ReprEnum, Flag, boundary=KEEP): - def __new__(cls: type[Self], value: int) -> Self: ... - def __or__(self: Self, other: int) -> Self: ... - def __and__(self: Self, other: int) -> Self: ... - def __xor__(self: Self, other: int) -> Self: ... + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ else: - class IntFlag(int, Flag): - def __new__(cls: type[Self], value: int) -> Self: ... - def __or__(self: Self, other: int) -> Self: ... - def __and__(self: Self, other: int) -> Self: ... - def __xor__(self: Self, other: int) -> Self: ... + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): - def __new__(cls: type[Self], value: str) -> Self: ... + def __new__(cls, value: str) -> Self: ... _value_: str @_magic_enum_attr def value(self) -> str: ... @@ -275,6 +278,6 @@ if sys.version_info >= (3, 11): KEEP = FlagBoundary.KEEP def global_str(self: Enum) -> str: ... - def global_enum(cls: _EnumerationT, update_str: bool = ...) -> _EnumerationT: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... def global_enum_repr(self: Enum) -> str: ... def global_flag_repr(self: Flag) -> str: ... diff --git a/mypy/typeshed/stdlib/fcntl.pyi b/mypy/typeshed/stdlib/fcntl.pyi index 69863bf580fa..90676e365712 100644 --- a/mypy/typeshed/stdlib/fcntl.pyi +++ b/mypy/typeshed/stdlib/fcntl.pyi @@ -101,16 +101,16 @@ if sys.platform != "win32": I_SWROPT: int I_UNLINK: int @overload - def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = 0) -> int: ... @overload - def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: bytes) -> bytes: ... + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: str | ReadOnlyBuffer) -> bytes: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = 0, __mutate_flag: bool = True) -> int: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = ...) -> int: ... + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = True) -> int: ... @overload def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[False]) -> bytes: ... @overload - def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = ...) -> bytes: ... 
+ def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = True) -> bytes: ... def flock(__fd: FileDescriptorLike, __operation: int) -> None: ... - def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = ..., __start: int = ..., __whence: int = ...) -> Any: ... + def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = 0, __start: int = 0, __whence: int = 0) -> Any: ... diff --git a/mypy/typeshed/stdlib/filecmp.pyi b/mypy/typeshed/stdlib/filecmp.pyi index dd4a0628b026..008d7a44e6c4 100644 --- a/mypy/typeshed/stdlib/filecmp.pyi +++ b/mypy/typeshed/stdlib/filecmp.pyi @@ -12,9 +12,9 @@ __all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] DEFAULT_IGNORES: list[str] BUFSIZE: Literal[8192] -def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = ...) -> bool: ... +def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: bool | Literal[0, 1] = True) -> bool: ... def cmpfiles( - a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = ... + a: GenericPath[AnyStr], b: GenericPath[AnyStr], common: Iterable[GenericPath[AnyStr]], shallow: bool | Literal[0, 1] = True ) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... class dircmp(Generic[AnyStr]): @@ -22,8 +22,8 @@ class dircmp(Generic[AnyStr]): self, a: GenericPath[AnyStr], b: GenericPath[AnyStr], - ignore: Sequence[AnyStr] | None = ..., - hide: Sequence[AnyStr] | None = ..., + ignore: Sequence[AnyStr] | None = None, + hide: Sequence[AnyStr] | None = None, ) -> None: ... left: AnyStr right: AnyStr diff --git a/mypy/typeshed/stdlib/fileinput.pyi b/mypy/typeshed/stdlib/fileinput.pyi index e0babbcd40cc..e9f3713b4eaf 100644 --- a/mypy/typeshed/stdlib/fileinput.pyi +++ b/mypy/typeshed/stdlib/fileinput.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import AnyStr_co, Self, StrOrBytesPath +from _typeshed import AnyStr_co, StrOrBytesPath from collections.abc import Callable, Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, Protocol, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -36,89 +36,89 @@ if sys.version_info >= (3, 10): # encoding and errors are added @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> FileInput[str]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., - encoding: None = ..., - errors: None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, ) -> FileInput[bytes]: ... 
@overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> FileInput[Any]: ... elif sys.version_info >= (3, 8): # bufsize is dropped and mode and openhook become keyword-only @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> FileInput[str]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... else: @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> FileInput[str]: ... # Because mode isn't keyword-only here yet, we need two overloads each for # the bytes case and the fallback case. @overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... @overload def input( @@ -127,17 +127,17 @@ else: backup: str, bufsize: int, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> FileInput[bytes]: ... 
@overload def input( - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... @overload def input( @@ -146,7 +146,7 @@ else: backup: str, bufsize: int, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> FileInput[Any]: ... def close() -> None: ... @@ -164,38 +164,38 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., - encoding: None = ..., - errors: None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, + encoding: None = None, + errors: None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... elif sys.version_info >= (3, 8): @@ -203,57 +203,57 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> None: ... 
@overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... else: @overload def __init__( self: FileInput[str], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., - mode: _TextMode = ..., - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, + mode: _TextMode = "r", + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[str]] | None = None, ) -> None: ... # Because mode isn't keyword-only here yet, we need two overloads each for # the bytes case and the fallback case. @overload def __init__( self: FileInput[bytes], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( @@ -263,18 +263,18 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): backup: str, bufsize: int, mode: Literal["rb"], - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[bytes]] | None = None, ) -> None: ... @overload def __init__( self: FileInput[Any], - files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = ..., - inplace: bool = ..., - backup: str = ..., - bufsize: int = ..., + files: StrOrBytesPath | Iterable[StrOrBytesPath] | None = None, + inplace: bool = False, + backup: str = "", + bufsize: int = 0, *, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... @overload def __init__( @@ -284,16 +284,16 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): backup: str, bufsize: int, mode: str, - openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = ..., + openhook: Callable[[StrOrBytesPath, str], _HasReadlineAndFileno[Any]] | None = None, ) -> None: ... def __del__(self) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... 
def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> AnyStr: ... if sys.version_info < (3, 11): def __getitem__(self, i: int) -> AnyStr: ... @@ -311,10 +311,10 @@ class FileInput(Iterator[AnyStr], Generic[AnyStr]): if sys.version_info >= (3, 10): def hook_compressed( - filename: StrOrBytesPath, mode: str, *, encoding: str | None = ..., errors: str | None = ... + filename: StrOrBytesPath, mode: str, *, encoding: str | None = None, errors: str | None = None ) -> IO[Any]: ... else: def hook_compressed(filename: StrOrBytesPath, mode: str) -> IO[Any]: ... -def hook_encoded(encoding: str, errors: str | None = ...) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... +def hook_encoded(encoding: str, errors: str | None = None) -> Callable[[StrOrBytesPath, str], IO[Any]]: ... diff --git a/mypy/typeshed/stdlib/formatter.pyi b/mypy/typeshed/stdlib/formatter.pyi index 0aac0a5f918c..05c3c8b3dd41 100644 --- a/mypy/typeshed/stdlib/formatter.pyi +++ b/mypy/typeshed/stdlib/formatter.pyi @@ -8,11 +8,11 @@ _StylesType: TypeAlias = tuple[Any, ...] class NullFormatter: writer: NullWriter | None - def __init__(self, writer: NullWriter | None = ...) -> None: ... + def __init__(self, writer: NullWriter | None = None) -> None: ... def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def add_label_data(self, format: str, counter: int, blankline: int | None = ...) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... def add_flowing_data(self, data: str) -> None: ... def add_literal_data(self, data: str) -> None: ... def flush_softspace(self) -> None: ... @@ -24,8 +24,8 @@ class NullFormatter: def pop_margin(self) -> None: ... def set_spacing(self, spacing: str | None) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... - def pop_style(self, n: int = ...) -> None: ... - def assert_line_data(self, flag: int = ...) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... class AbstractFormatter: writer: NullWriter @@ -45,7 +45,7 @@ class AbstractFormatter: def end_paragraph(self, blankline: int) -> None: ... def add_line_break(self) -> None: ... def add_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def add_label_data(self, format: str, counter: int, blankline: int | None = ...) -> None: ... + def add_label_data(self, format: str, counter: int, blankline: int | None = None) -> None: ... def format_counter(self, format: Iterable[str], counter: int) -> str: ... def format_letter(self, case: str, counter: int) -> str: ... def format_roman(self, case: str, counter: int) -> str: ... @@ -60,11 +60,10 @@ class AbstractFormatter: def pop_margin(self) -> None: ... def set_spacing(self, spacing: str | None) -> None: ... def push_style(self, *styles: _StylesType) -> None: ... - def pop_style(self, n: int = ...) -> None: ... - def assert_line_data(self, flag: int = ...) -> None: ... + def pop_style(self, n: int = 1) -> None: ... + def assert_line_data(self, flag: int = 1) -> None: ... class NullWriter: - def __init__(self) -> None: ... def flush(self) -> None: ... def new_alignment(self, align: str | None) -> None: ... def new_font(self, font: _FontType) -> None: ... 
@@ -78,28 +77,12 @@ class NullWriter: def send_flowing_data(self, data: str) -> None: ... def send_literal_data(self, data: str) -> None: ... -class AbstractWriter(NullWriter): - def new_alignment(self, align: str | None) -> None: ... - def new_font(self, font: _FontType) -> None: ... - def new_margin(self, margin: int, level: int) -> None: ... - def new_spacing(self, spacing: str | None) -> None: ... - def new_styles(self, styles: tuple[Any, ...]) -> None: ... - def send_paragraph(self, blankline: int) -> None: ... - def send_line_break(self) -> None: ... - def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def send_label_data(self, data: str) -> None: ... - def send_flowing_data(self, data: str) -> None: ... - def send_literal_data(self, data: str) -> None: ... +class AbstractWriter(NullWriter): ... class DumbWriter(NullWriter): file: IO[str] maxcol: int - def __init__(self, file: IO[str] | None = ..., maxcol: int = ...) -> None: ... + def __init__(self, file: IO[str] | None = None, maxcol: int = 72) -> None: ... def reset(self) -> None: ... - def send_paragraph(self, blankline: int) -> None: ... - def send_line_break(self) -> None: ... - def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... - def send_literal_data(self, data: str) -> None: ... - def send_flowing_data(self, data: str) -> None: ... -def test(file: str | None = ...) -> None: ... +def test(file: str | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/fractions.pyi b/mypy/typeshed/stdlib/fractions.pyi index e05f59e3d191..97cefc916d9b 100644 --- a/mypy/typeshed/stdlib/fractions.pyi +++ b/mypy/typeshed/stdlib/fractions.pyi @@ -1,10 +1,9 @@ import sys -from _typeshed import Self from collections.abc import Callable from decimal import Decimal from numbers import Integral, Rational, Real from typing import Any, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias _ComparableNum: TypeAlias = int | float | Decimal | Real @@ -24,15 +23,15 @@ else: class Fraction(Rational): @overload def __new__( - cls: type[Self], numerator: int | Rational = ..., denominator: int | Rational | None = ..., *, _normalize: bool = ... + cls, numerator: int | Rational = 0, denominator: int | Rational | None = None, *, _normalize: bool = True ) -> Self: ... @overload - def __new__(cls: type[Self], __value: float | Decimal | str, *, _normalize: bool = ...) -> Self: ... + def __new__(cls, __value: float | Decimal | str, *, _normalize: bool = True) -> Self: ... @classmethod - def from_float(cls: type[Self], f: float) -> Self: ... + def from_float(cls, f: float) -> Self: ... @classmethod - def from_decimal(cls: type[Self], dec: Decimal) -> Self: ... - def limit_denominator(self, max_denominator: int = ...) -> Fraction: ... + def from_decimal(cls, dec: Decimal) -> Self: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... if sys.version_info >= (3, 8): def as_integer_ratio(self) -> tuple[int, int]: ... @@ -129,7 +128,7 @@ class Fraction(Rational): def __floor__(a) -> int: ... def __ceil__(a) -> int: ... @overload - def __round__(self, ndigits: None = ...) -> int: ... + def __round__(self, ndigits: None = None) -> int: ... @overload def __round__(self, ndigits: int) -> Fraction: ... def __hash__(self) -> int: ... @@ -139,8 +138,8 @@ class Fraction(Rational): def __le__(a, b: _ComparableNum) -> bool: ... def __ge__(a, b: _ComparableNum) -> bool: ... def __bool__(a) -> bool: ... 
- def __copy__(self: Self) -> Self: ... - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... if sys.version_info >= (3, 11): def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... # Not actually defined within fractions.py, but provides more useful diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 3d284c597019..76d9dc02a5da 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -1,11 +1,11 @@ import sys -from _typeshed import Self, SupportsRead, SupportsReadline +from _typeshed import SupportsRead, SupportsReadline from collections.abc import Callable, Iterable, Iterator from socket import socket from ssl import SSLContext from types import TracebackType from typing import Any, TextIO -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] @@ -36,7 +36,7 @@ class FTP: lastresp: str file: TextIO | None encoding: str - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -44,28 +44,28 @@ class FTP: if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - encoding: str = ..., + encoding: str = "utf-8", ) -> None: ... else: def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, ) -> None: ... def connect( - self, host: str = ..., port: int = ..., timeout: float = ..., source_address: tuple[str, int] | None = ... + self, host: str = "", port: int = 0, timeout: float = -999, source_address: tuple[str, int] | None = None ) -> str: ... def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... @@ -85,28 +85,28 @@ class FTP: def sendeprt(self, host: str, port: int) -> str: ... def makeport(self) -> socket: ... def makepasv(self) -> tuple[str, int]: ... - def login(self, user: str = ..., passwd: str = ..., acct: str = ...) -> str: ... + def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. - def ntransfercmd(self, cmd: str, rest: int | str | None = ...) -> tuple[socket, int]: ... - def transfercmd(self, cmd: str, rest: int | str | None = ...) -> socket: ... + def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int]: ... + def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ... def retrbinary( - self, cmd: str, callback: Callable[[bytes], object], blocksize: int = ..., rest: int | str | None = ... + self, cmd: str, callback: Callable[[bytes], object], blocksize: int = 8192, rest: int | str | None = None ) -> str: ... 
def storbinary( self, cmd: str, fp: SupportsRead[bytes], - blocksize: int = ..., - callback: Callable[[bytes], object] | None = ..., - rest: int | str | None = ..., + blocksize: int = 8192, + callback: Callable[[bytes], object] | None = None, + rest: int | str | None = None, ) -> str: ... - def retrlines(self, cmd: str, callback: Callable[[str], object] | None = ...) -> str: ... - def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = ...) -> str: ... + def retrlines(self, cmd: str, callback: Callable[[str], object] | None = None) -> str: ... + def storlines(self, cmd: str, fp: SupportsReadline[bytes], callback: Callable[[bytes], object] | None = None) -> str: ... def acct(self, password: str) -> str: ... def nlst(self, *args: str) -> list[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: str | Callable[[str], object]) -> None: ... - def mlsd(self, path: str = ..., facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... + def mlsd(self, path: str = "", facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... def rename(self, fromname: str, toname: str) -> str: ... def delete(self, filename: str) -> str: ... def cwd(self, dirname: str) -> str: ... @@ -121,36 +121,36 @@ class FTP_TLS(FTP): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - context: SSLContext | None = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - encoding: str = ..., + encoding: str = "utf-8", ) -> None: ... else: def __init__( self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - context: SSLContext | None = ..., + host: str = "", + user: str = "", + passwd: str = "", + acct: str = "", + keyfile: str | None = None, + certfile: str | None = None, + context: SSLContext | None = None, timeout: float = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, ) -> None: ... ssl_version: int keyfile: str | None certfile: str | None context: SSLContext - def login(self, user: str = ..., passwd: str = ..., acct: str = ..., secure: bool = ...) -> str: ... + def login(self, user: str = "", passwd: str = "", acct: str = "", secure: bool = True) -> str: ... def auth(self) -> str: ... def prot_p(self) -> str: ... def prot_c(self) -> str: ... @@ -161,5 +161,5 @@ def parse227(resp: str) -> tuple[str, int]: ... # undocumented def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented def parse257(resp: str) -> str: ... # undocumented def ftpcp( - source: FTP, sourcename: str, target: FTP, targetname: str = ..., type: Literal["A", "I"] = ... + source: FTP, sourcename: str, target: FTP, targetname: str = "", type: Literal["A", "I"] = "I" ) -> None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index 5c3f662c3dd5..1214e349f605 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -1,9 +1,9 @@ import sys import types -from _typeshed import IdentityFunction, Self, SupportsAllComparisons, SupportsItems +from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -55,15 +55,15 @@ class _lru_cache_wrapper(Generic[_T]): if sys.version_info >= (3, 8): @overload - def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... @overload - def lru_cache(maxsize: Callable[..., _T], typed: bool = ...) -> _lru_cache_wrapper[_T]: ... + def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... else: - def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... WRAPPER_ASSIGNMENTS: tuple[ - Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"], + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] @@ -79,7 +79,7 @@ class partial(Generic[_T]): def args(self) -> tuple[Any, ...]: ... @property def keywords(self) -> dict[str, Any]: ... - def __new__(cls: type[Self], __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... + def __new__(cls, __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -96,7 +96,7 @@ class partialmethod(Generic[_T]): @overload def __init__(self, __func: _Descriptor, *args: Any, **keywords: Any) -> None: ... if sys.version_info >= (3, 8): - def __get__(self, obj: Any, cls: type[Any] | None = ...) -> Callable[..., _T]: ... + def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... else: def __get__(self, obj: Any, cls: type[Any] | None) -> Callable[..., _T]: ... @@ -111,11 +111,11 @@ class _SingleDispatchCallable(Generic[_T]): # @fun.register(complex) # def _(arg, verbose=False): ... @overload - def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... # @fun.register # def _(arg: int, verbose=False): @overload - def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... # fun.register(int, lambda x: x) @overload def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... 
@@ -132,21 +132,21 @@ if sys.version_info >= (3, 8): @property def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload - def register(self, cls: Callable[..., _T], method: None = ...) -> Callable[..., _T]: ... + def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... @overload def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... - def __get__(self, obj: _S, cls: type[_S] | None = ...) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... class cached_property(Generic[_T]): func: Callable[[Any], _T] attrname: str | None def __init__(self, func: Callable[[Any], _T]) -> None: ... @overload - def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + def __get__(self, instance: None, owner: type[Any] | None = None) -> cached_property[_T]: ... @overload - def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... def __set_name__(self, owner: type[Any], name: str) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/gc.pyi b/mypy/typeshed/stdlib/gc.pyi index d24b7c1f4c7c..27cee726ba09 100644 --- a/mypy/typeshed/stdlib/gc.pyi +++ b/mypy/typeshed/stdlib/gc.pyi @@ -14,14 +14,14 @@ _CallbackType: TypeAlias = Callable[[Literal["start", "stop"], dict[str, int]], callbacks: list[_CallbackType] garbage: list[Any] -def collect(generation: int = ...) -> int: ... +def collect(generation: int = 2) -> int: ... def disable() -> None: ... def enable() -> None: ... def get_count() -> tuple[int, int, int]: ... def get_debug() -> int: ... if sys.version_info >= (3, 8): - def get_objects(generation: int | None = ...) -> list[Any]: ... + def get_objects(generation: int | None = None) -> list[Any]: ... else: def get_objects() -> list[Any]: ... diff --git a/mypy/typeshed/stdlib/genericpath.pyi b/mypy/typeshed/stdlib/genericpath.pyi index 911d582fd538..46426b63c852 100644 --- a/mypy/typeshed/stdlib/genericpath.pyi +++ b/mypy/typeshed/stdlib/genericpath.pyi @@ -1,5 +1,5 @@ import os -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRichComparisonT +from _typeshed import BytesPath, FileDescriptorOrPath, StrPath, SupportsRichComparisonT from collections.abc import Sequence from typing import overload from typing_extensions import Literal, LiteralString @@ -31,16 +31,16 @@ def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... @overload def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... -def exists(path: StrOrBytesPath | int) -> bool: ... -def getsize(filename: StrOrBytesPath | int) -> int: ... -def isfile(path: StrOrBytesPath | int) -> bool: ... -def isdir(s: StrOrBytesPath | int) -> bool: ... +def exists(path: FileDescriptorOrPath) -> bool: ... +def getsize(filename: FileDescriptorOrPath) -> int: ... +def isfile(path: FileDescriptorOrPath) -> bool: ... +def isdir(s: FileDescriptorOrPath) -> bool: ... 
 # These return float if os.stat_float_times() == True,
 # but int is a subclass of float.
-def getatime(filename: StrOrBytesPath | int) -> float: ...
-def getmtime(filename: StrOrBytesPath | int) -> float: ...
-def getctime(filename: StrOrBytesPath | int) -> float: ...
-def samefile(f1: StrOrBytesPath | int, f2: StrOrBytesPath | int) -> bool: ...
+def getatime(filename: FileDescriptorOrPath) -> float: ...
+def getmtime(filename: FileDescriptorOrPath) -> float: ...
+def getctime(filename: FileDescriptorOrPath) -> float: ...
+def samefile(f1: FileDescriptorOrPath, f2: FileDescriptorOrPath) -> bool: ...
 def sameopenfile(fp1: int, fp2: int) -> bool: ...
 def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ...
diff --git a/mypy/typeshed/stdlib/getopt.pyi b/mypy/typeshed/stdlib/getopt.pyi
index 42ddb1cb7020..14d63dbd6f99 100644
--- a/mypy/typeshed/stdlib/getopt.pyi
+++ b/mypy/typeshed/stdlib/getopt.pyi
@@ -6,6 +6,6 @@ def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tu
 class GetoptError(Exception):
     msg: str
     opt: str
-    def __init__(self, msg: str, opt: str = ...) -> None: ...
+    def __init__(self, msg: str, opt: str = "") -> None: ...
 
 error = GetoptError
diff --git a/mypy/typeshed/stdlib/getpass.pyi b/mypy/typeshed/stdlib/getpass.pyi
index 153db2f4cb9e..6104e0dedfee 100644
--- a/mypy/typeshed/stdlib/getpass.pyi
+++ b/mypy/typeshed/stdlib/getpass.pyi
@@ -2,7 +2,7 @@ from typing import TextIO
 
 __all__ = ["getpass", "getuser", "GetPassWarning"]
 
-def getpass(prompt: str = ..., stream: TextIO | None = ...) -> str: ...
+def getpass(prompt: str = "Password: ", stream: TextIO | None = None) -> str: ...
 def getuser() -> str: ...
 
 class GetPassWarning(UserWarning): ...
diff --git a/mypy/typeshed/stdlib/gettext.pyi b/mypy/typeshed/stdlib/gettext.pyi
index 3c07abeb2d8a..5d98227ec1f4 100644
--- a/mypy/typeshed/stdlib/gettext.pyi
+++ b/mypy/typeshed/stdlib/gettext.pyi
@@ -32,7 +32,7 @@ class _TranslationsReader(Protocol):
     # name: str
 
 class NullTranslations:
-    def __init__(self, fp: _TranslationsReader | None = ...) -> None: ...
+    def __init__(self, fp: _TranslationsReader | None = None) -> None: ...
     def _parse(self, fp: _TranslationsReader) -> None: ...
     def add_fallback(self, fallback: NullTranslations) -> None: ...
     def gettext(self, message: str) -> str: ...
@@ -49,7 +49,7 @@ class NullTranslations:
         def lgettext(self, message: str) -> str: ...
         def lngettext(self, msgid1: str, msgid2: str, n: int) -> str: ...
 
-    def install(self, names: Container[str] | None = ...) -> None: ...
+    def install(self, names: Container[str] | None = None) -> None: ...
 
 class GNUTranslations(NullTranslations):
     LE_MAGIC: Final[int]
@@ -59,14 +59,16 @@ class GNUTranslations(NullTranslations):
 
 @overload  # ignores incompatible overloads
 def find(  # type: ignore[misc]
-    domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[False] = ...
+    domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: Literal[False] = False
 ) -> str | None: ...
 @overload
 def find(
-    domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[True] = ...
+    domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, *, all: Literal[True]
 ) -> list[str]: ...
 @overload
-def find(domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: bool = ...) -> Any: ...
+def find(domain: str, localedir: StrPath | None, languages: Iterable[str] | None, all: Literal[True]) -> list[str]: ... +@overload +def find(domain: str, localedir: StrPath | None = None, languages: Iterable[str] | None = None, all: bool = False) -> Any: ... _NullTranslationsT = TypeVar("_NullTranslationsT", bound=NullTranslations) @@ -74,19 +76,19 @@ if sys.version_info >= (3, 11): @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: None = ..., - fallback: Literal[False] = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, ) -> GNUTranslations: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, *, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., + fallback: Literal[False] = False, ) -> _NullTranslationsT: ... @overload def translation( @@ -94,37 +96,37 @@ if sys.version_info >= (3, 11): localedir: StrPath | None, languages: Iterable[str] | None, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., + fallback: Literal[False] = False, ) -> _NullTranslationsT: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., - fallback: bool = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, ) -> NullTranslations: ... - def install(domain: str, localedir: StrPath | None = ..., *, names: Container[str] | None = ...) -> None: ... + def install(domain: str, localedir: StrPath | None = None, *, names: Container[str] | None = None) -> None: ... else: @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: None = ..., - fallback: Literal[False] = ..., - codeset: str | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: None = None, + fallback: Literal[False] = False, + codeset: str | None = None, ) -> GNUTranslations: ... @overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, *, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., - codeset: str | None = ..., + fallback: Literal[False] = False, + codeset: str | None = None, ) -> _NullTranslationsT: ... @overload def translation( @@ -132,24 +134,24 @@ else: localedir: StrPath | None, languages: Iterable[str] | None, class_: Callable[[io.BufferedReader], _NullTranslationsT], - fallback: Literal[False] = ..., - codeset: str | None = ..., + fallback: Literal[False] = False, + codeset: str | None = None, ) -> _NullTranslationsT: ... 
@overload def translation( domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Callable[[io.BufferedReader], NullTranslations] | None = ..., - fallback: bool = ..., - codeset: str | None = ..., + localedir: StrPath | None = None, + languages: Iterable[str] | None = None, + class_: Callable[[io.BufferedReader], NullTranslations] | None = None, + fallback: bool = False, + codeset: str | None = None, ) -> NullTranslations: ... def install( - domain: str, localedir: StrPath | None = ..., codeset: str | None = ..., names: Container[str] | None = ... + domain: str, localedir: StrPath | None = None, codeset: str | None = None, names: Container[str] | None = None ) -> None: ... -def textdomain(domain: str | None = ...) -> str: ... -def bindtextdomain(domain: str, localedir: StrPath | None = ...) -> str: ... +def textdomain(domain: str | None = None) -> str: ... +def bindtextdomain(domain: str, localedir: StrPath | None = None) -> str: ... def dgettext(domain: str, message: str) -> str: ... def dngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... def gettext(message: str) -> str: ... @@ -166,6 +168,6 @@ if sys.version_info < (3, 11): def ldgettext(domain: str, message: str) -> str: ... def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... - def bind_textdomain_codeset(domain: str, codeset: str | None = ...) -> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None = None) -> str: ... Catalog = translation diff --git a/mypy/typeshed/stdlib/glob.pyi b/mypy/typeshed/stdlib/glob.pyi index c63563d19f58..914ccc12ef1e 100644 --- a/mypy/typeshed/stdlib/glob.pyi +++ b/mypy/typeshed/stdlib/glob.pyi @@ -12,31 +12,31 @@ if sys.version_info >= (3, 11): def glob( pathname: AnyStr, *, - root_dir: StrOrBytesPath | None = ..., - dir_fd: int | None = ..., - recursive: bool = ..., - include_hidden: bool = ..., + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, ) -> list[AnyStr]: ... def iglob( pathname: AnyStr, *, - root_dir: StrOrBytesPath | None = ..., - dir_fd: int | None = ..., - recursive: bool = ..., - include_hidden: bool = ..., + root_dir: StrOrBytesPath | None = None, + dir_fd: int | None = None, + recursive: bool = False, + include_hidden: bool = False, ) -> Iterator[AnyStr]: ... elif sys.version_info >= (3, 10): def glob( - pathname: AnyStr, *, root_dir: StrOrBytesPath | None = ..., dir_fd: int | None = ..., recursive: bool = ... + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False ) -> list[AnyStr]: ... def iglob( - pathname: AnyStr, *, root_dir: StrOrBytesPath | None = ..., dir_fd: int | None = ..., recursive: bool = ... + pathname: AnyStr, *, root_dir: StrOrBytesPath | None = None, dir_fd: int | None = None, recursive: bool = False ) -> Iterator[AnyStr]: ... else: - def glob(pathname: AnyStr, *, recursive: bool = ...) -> list[AnyStr]: ... - def iglob(pathname: AnyStr, *, recursive: bool = ...) -> Iterator[AnyStr]: ... + def glob(pathname: AnyStr, *, recursive: bool = False) -> list[AnyStr]: ... + def iglob(pathname: AnyStr, *, recursive: bool = False) -> Iterator[AnyStr]: ... def escape(pathname: AnyStr) -> AnyStr: ... def has_magic(s: str | bytes) -> bool: ... 
# undocumented
diff --git a/mypy/typeshed/stdlib/graphlib.pyi b/mypy/typeshed/stdlib/graphlib.pyi
index 4c6959decc4b..c02d447ad501 100644
--- a/mypy/typeshed/stdlib/graphlib.pyi
+++ b/mypy/typeshed/stdlib/graphlib.pyi
@@ -12,7 +12,7 @@ if sys.version_info >= (3, 11):
 
 class TopologicalSorter(Generic[_T]):
     @overload
-    def __init__(self, graph: None = ...) -> None: ...
+    def __init__(self, graph: None = None) -> None: ...
     @overload
     def __init__(self, graph: SupportsItems[_T, Iterable[_T]]) -> None: ...
     def add(self, node: _T, *predecessors: _T) -> None: ...
diff --git a/mypy/typeshed/stdlib/gzip.pyi b/mypy/typeshed/stdlib/gzip.pyi
index abf12925aea2..6a794f381ad6 100644
--- a/mypy/typeshed/stdlib/gzip.pyi
+++ b/mypy/typeshed/stdlib/gzip.pyi
@@ -1,9 +1,9 @@
 import _compression
 import sys
 import zlib
-from _typeshed import ReadableBuffer, StrOrBytesPath
+from _typeshed import ReadableBuffer, StrOrBytesPath, _BufferWithLen
 from io import FileIO
-from typing import Any, Protocol, TextIO, overload
+from typing import Protocol, TextIO, overload
 from typing_extensions import Literal, TypeAlias
 
 if sys.version_info >= (3, 8):
@@ -15,20 +15,26 @@ _ReadBinaryMode: TypeAlias = Literal["r", "rb"]
 _WriteBinaryMode: TypeAlias = Literal["a", "ab", "w", "wb", "x", "xb"]
 _OpenTextMode: TypeAlias = Literal["rt", "at", "wt", "xt"]
 
-READ: Literal[1]
-WRITE: Literal[2]
+READ: Literal[1]  # undocumented
+WRITE: Literal[2]  # undocumented
+
+FTEXT: int  # actually Literal[1]  # undocumented
+FHCRC: int  # actually Literal[2]  # undocumented
+FEXTRA: int  # actually Literal[4]  # undocumented
+FNAME: int  # actually Literal[8]  # undocumented
+FCOMMENT: int  # actually Literal[16]  # undocumented
 
 class _ReadableFileobj(Protocol):
     def read(self, __n: int) -> bytes: ...
-    def seek(self, __n: int) -> Any: ...
+    def seek(self, __n: int) -> object: ...
     # The following attributes and methods are optional:
     # name: str
     # mode: str
     # def fileno() -> int: ...
 
 class _WritableFileobj(Protocol):
-    def write(self, __b: bytes) -> Any: ...
-    def flush(self) -> Any: ...
+    def write(self, __b: bytes) -> object: ...
+    def flush(self) -> object: ...
     # The following attributes and methods are optional:
     # name: str
     # mode: str
@@ -37,45 +43,45 @@ class _WritableFileobj(Protocol):
 @overload
 def open(
     filename: StrOrBytesPath | _ReadableFileobj,
-    mode: _ReadBinaryMode = ...,
-    compresslevel: int = ...,
-    encoding: None = ...,
-    errors: None = ...,
-    newline: None = ...,
+    mode: _ReadBinaryMode = "rb",
+    compresslevel: int = 9,
+    encoding: None = None,
+    errors: None = None,
+    newline: None = None,
 ) -> GzipFile: ...
 @overload
 def open(
     filename: StrOrBytesPath | _WritableFileobj,
     mode: _WriteBinaryMode,
-    compresslevel: int = ...,
-    encoding: None = ...,
-    errors: None = ...,
-    newline: None = ...,
+    compresslevel: int = 9,
+    encoding: None = None,
+    errors: None = None,
+    newline: None = None,
 ) -> GzipFile: ...
 @overload
 def open(
     filename: StrOrBytesPath,
     mode: _OpenTextMode,
-    compresslevel: int = ...,
-    encoding: str | None = ...,
-    errors: str | None = ...,
-    newline: str | None = ...,
+    compresslevel: int = 9,
+    encoding: str | None = None,
+    errors: str | None = None,
+    newline: str | None = None,
 ) -> TextIO: ...
@overload def open( filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, mode: str, - compresslevel: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + compresslevel: int = 9, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> GzipFile | TextIO: ... class _PaddedFile: file: _ReadableFileobj - def __init__(self, f: _ReadableFileobj, prepend: bytes = ...) -> None: ... + def __init__(self, f: _ReadableFileobj, prepend: bytes = b"") -> None: ... def read(self, size: int) -> bytes: ... - def prepend(self, prepend: bytes = ...) -> None: ... + def prepend(self, prepend: bytes = b"") -> None: ... def seek(self, off: int) -> int: ... def seekable(self) -> bool: ... @@ -93,45 +99,45 @@ class GzipFile(_compression.BaseStream): self, filename: StrOrBytesPath | None, mode: _ReadBinaryMode, - compresslevel: int = ..., - fileobj: _ReadableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, *, mode: _ReadBinaryMode, - compresslevel: int = ..., - fileobj: _ReadableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _ReadableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, filename: StrOrBytesPath | None, mode: _WriteBinaryMode, - compresslevel: int = ..., - fileobj: _WritableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, *, mode: _WriteBinaryMode, - compresslevel: int = ..., - fileobj: _WritableFileobj | None = ..., - mtime: float | None = ..., + compresslevel: int = 9, + fileobj: _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @overload def __init__( self, - filename: StrOrBytesPath | None = ..., - mode: str | None = ..., - compresslevel: int = ..., - fileobj: _ReadableFileobj | _WritableFileobj | None = ..., - mtime: float | None = ..., + filename: StrOrBytesPath | None = None, + mode: str | None = None, + compresslevel: int = 9, + fileobj: _ReadableFileobj | _WritableFileobj | None = None, + mtime: float | None = None, ) -> None: ... @property def filename(self) -> str: ... @@ -139,29 +145,23 @@ class GzipFile(_compression.BaseStream): def mtime(self) -> int | None: ... crc: int def write(self, data: ReadableBuffer) -> int: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... def peek(self, n: int) -> bytes: ... - @property - def closed(self) -> bool: ... def close(self) -> None: ... - def flush(self, zlib_mode: int = ...) -> None: ... + def flush(self, zlib_mode: int = 2) -> None: ... def fileno(self) -> int: ... def rewind(self) -> None: ... - def readable(self) -> bool: ... - def writable(self) -> bool: ... - def seekable(self) -> bool: ... - def seek(self, offset: int, whence: int = ...) -> int: ... - def readline(self, size: int | None = ...) -> bytes: ... + def seek(self, offset: int, whence: int = 0) -> int: ... + def readline(self, size: int | None = -1) -> bytes: ... class _GzipReader(_compression.DecompressReader): def __init__(self, fp: _ReadableFileobj) -> None: ... - def read(self, size: int = ...) -> bytes: ... 
if sys.version_info >= (3, 8): - def compress(data: bytes, compresslevel: int = ..., *, mtime: float | None = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = 9, *, mtime: float | None = None) -> bytes: ... else: - def compress(data: bytes, compresslevel: int = ...) -> bytes: ... + def compress(data: _BufferWithLen, compresslevel: int = 9) -> bytes: ... -def decompress(data: bytes) -> bytes: ... +def decompress(data: ReadableBuffer) -> bytes: ... diff --git a/mypy/typeshed/stdlib/hashlib.pyi b/mypy/typeshed/stdlib/hashlib.pyi index 2a417364b171..18b1ab549764 100644 --- a/mypy/typeshed/stdlib/hashlib.pyi +++ b/mypy/typeshed/stdlib/hashlib.pyi @@ -1,8 +1,8 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer from collections.abc import Callable, Set as AbstractSet from typing import Protocol -from typing_extensions import final +from typing_extensions import Self, final if sys.version_info >= (3, 11): __all__ = ( @@ -56,31 +56,31 @@ class _Hash: @property def name(self) -> str: ... def __init__(self, data: ReadableBuffer = ...) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... def update(self, __data: ReadableBuffer) -> None: ... if sys.version_info >= (3, 9): - def new(name: str, data: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def md5(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha1(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha224(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha256(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha384(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... - def sha512(string: ReadableBuffer = ..., *, usedforsecurity: bool = ...) -> _Hash: ... + def new(name: str, data: ReadableBuffer = b"", *, usedforsecurity: bool = ...) -> _Hash: ... + def md5(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha1(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha224(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha256(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha384(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... + def sha512(string: ReadableBuffer = b"", *, usedforsecurity: bool = True) -> _Hash: ... elif sys.version_info >= (3, 8): - def new(name: str, data: ReadableBuffer = ...) -> _Hash: ... - def md5(string: ReadableBuffer = ...) -> _Hash: ... - def sha1(string: ReadableBuffer = ...) -> _Hash: ... - def sha224(string: ReadableBuffer = ...) -> _Hash: ... - def sha256(string: ReadableBuffer = ...) -> _Hash: ... - def sha384(string: ReadableBuffer = ...) -> _Hash: ... - def sha512(string: ReadableBuffer = ...) -> _Hash: ... + def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... + def md5(string: ReadableBuffer = b"") -> _Hash: ... + def sha1(string: ReadableBuffer = b"") -> _Hash: ... + def sha224(string: ReadableBuffer = b"") -> _Hash: ... + def sha256(string: ReadableBuffer = b"") -> _Hash: ... + def sha384(string: ReadableBuffer = b"") -> _Hash: ... + def sha512(string: ReadableBuffer = b"") -> _Hash: ... else: - def new(name: str, data: ReadableBuffer = ...) -> _Hash: ... 
+ def new(name: str, data: ReadableBuffer = b"") -> _Hash: ... def md5(__string: ReadableBuffer = ...) -> _Hash: ... def sha1(__string: ReadableBuffer = ...) -> _Hash: ... def sha224(__string: ReadableBuffer = ...) -> _Hash: ... @@ -92,7 +92,7 @@ algorithms_guaranteed: AbstractSet[str] algorithms_available: AbstractSet[str] def pbkdf2_hmac( - hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = ... + hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = None ) -> bytes: ... class _VarLenHash: @@ -115,12 +115,12 @@ shake_256 = _VarLenHash def scrypt( password: ReadableBuffer, *, - salt: ReadableBuffer | None = ..., - n: int | None = ..., - r: int | None = ..., - p: int | None = ..., - maxmem: int = ..., - dklen: int = ..., + salt: ReadableBuffer | None = None, + n: int | None = None, + r: int | None = None, + p: int | None = None, + maxmem: int = 0, + dklen: int = 64, ) -> bytes: ... @final class _BlakeHash(_Hash): @@ -177,5 +177,5 @@ if sys.version_info >= (3, 11): def readable(self) -> bool: ... def file_digest( - __fileobj: _BytesIOLike | _FileDigestFileObj, __digest: str | Callable[[], _Hash], *, _bufsize: int = ... + __fileobj: _BytesIOLike | _FileDigestFileObj, __digest: str | Callable[[], _Hash], *, _bufsize: int = 262144 ) -> _Hash: ... diff --git a/mypy/typeshed/stdlib/heapq.pyi b/mypy/typeshed/stdlib/heapq.pyi index b280322685db..61418b3704d6 100644 --- a/mypy/typeshed/stdlib/heapq.pyi +++ b/mypy/typeshed/stdlib/heapq.pyi @@ -2,16 +2,17 @@ from _heapq import * from _typeshed import SupportsRichComparison from collections.abc import Callable, Iterable from typing import Any, TypeVar +from typing_extensions import Final __all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] _S = TypeVar("_S") -__about__: str +__about__: Final[str] def merge( - *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ..., reverse: bool = ... + *iterables: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None, reverse: bool = False ) -> Iterable[_S]: ... -def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... -def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... +def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = None) -> list[_S]: ... def _heapify_max(__x: list[Any]) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/hmac.pyi b/mypy/typeshed/stdlib/hmac.pyi index af69fc7ea46d..b9a867f7bd61 100644 --- a/mypy/typeshed/stdlib/hmac.pyi +++ b/mypy/typeshed/stdlib/hmac.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, _BufferWithLen from collections.abc import Callable from types import ModuleType from typing import Any, AnyStr, overload @@ -18,19 +18,19 @@ if sys.version_info >= (3, 8): # In reality digestmod has a default value, but the function always throws an error # if the argument is not given, so we pretend it is a required argument. @overload - def new(key: bytes, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None, digestmod: _DigestMod) -> HMAC: ... 
@overload - def new(key: bytes, *, digestmod: _DigestMod) -> HMAC: ... + def new(key: bytes | bytearray, *, digestmod: _DigestMod) -> HMAC: ... else: - def new(key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod | None = ...) -> HMAC: ... + def new(key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod | None = None) -> HMAC: ... class HMAC: digest_size: int block_size: int @property def name(self) -> str: ... - def __init__(self, key: bytes, msg: ReadableBuffer | None = ..., digestmod: _DigestMod = ...) -> None: ... + def __init__(self, key: bytes | bytearray, msg: ReadableBuffer | None = None, digestmod: _DigestMod = "") -> None: ... def update(self, msg: ReadableBuffer) -> None: ... def digest(self) -> bytes: ... def hexdigest(self) -> str: ... @@ -40,4 +40,4 @@ class HMAC: def compare_digest(__a: ReadableBuffer, __b: ReadableBuffer) -> bool: ... @overload def compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... -def digest(key: bytes, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... +def digest(key: _BufferWithLen, msg: ReadableBuffer, digest: _DigestMod) -> bytes: ... diff --git a/mypy/typeshed/stdlib/html/__init__.pyi b/mypy/typeshed/stdlib/html/__init__.pyi index 109c5f4b50fb..afba90832535 100644 --- a/mypy/typeshed/stdlib/html/__init__.pyi +++ b/mypy/typeshed/stdlib/html/__init__.pyi @@ -2,5 +2,5 @@ from typing import AnyStr __all__ = ["escape", "unescape"] -def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... +def escape(s: AnyStr, quote: bool = True) -> AnyStr: ... def unescape(s: AnyStr) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/html/parser.pyi b/mypy/typeshed/stdlib/html/parser.pyi index 2948eadc9800..d322ade965d9 100644 --- a/mypy/typeshed/stdlib/html/parser.pyi +++ b/mypy/typeshed/stdlib/html/parser.pyi @@ -4,11 +4,9 @@ from re import Pattern __all__ = ["HTMLParser"] class HTMLParser(ParserBase): - def __init__(self, *, convert_charrefs: bool = ...) -> None: ... + def __init__(self, *, convert_charrefs: bool = True) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> None: ... - def reset(self) -> None: ... - def getpos(self) -> tuple[int, int]: ... def get_starttag_text(self) -> str | None: ... def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... @@ -19,7 +17,6 @@ class HTMLParser(ParserBase): def handle_comment(self, data: str) -> None: ... def handle_decl(self, decl: str) -> None: ... def handle_pi(self, data: str) -> None: ... - def unknown_decl(self, data: str) -> None: ... CDATA_CONTENT_ELEMENTS: tuple[str, ...] def check_for_whole_start_tag(self, i: int) -> int: ... # undocumented def clear_cdata_mode(self) -> None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 08c3f2c8be0b..b1506b50e750 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -2,11 +2,11 @@ import email.message import io import ssl import types -from _typeshed import Self, WriteableBuffer +from _typeshed import ReadableBuffer, SupportsRead, WriteableBuffer from collections.abc import Callable, Iterable, Iterator, Mapping from socket import socket -from typing import IO, Any, BinaryIO, TypeVar, overload -from typing_extensions import TypeAlias +from typing import Any, BinaryIO, TypeVar, overload +from typing_extensions import Self, TypeAlias __all__ = [ "HTTPResponse", @@ -30,7 +30,7 @@ __all__ = [ "HTTPSConnection", ] -_DataType: TypeAlias = bytes | IO[Any] | Iterable[bytes] | str +_DataType: TypeAlias = SupportsRead[bytes] | Iterable[ReadableBuffer] | ReadableBuffer _T = TypeVar("_T") HTTP_PORT: int @@ -114,21 +114,20 @@ class HTTPResponse(io.BufferedIOBase, BinaryIO): chunk_left: int | None length: int | None will_close: bool - def __init__(self, sock: socket, debuglevel: int = ..., method: str | None = ..., url: str | None = ...) -> None: ... - def peek(self, n: int = ...) -> bytes: ... - def read(self, amt: int | None = ...) -> bytes: ... - def read1(self, n: int = ...) -> bytes: ... + def __init__(self, sock: socket, debuglevel: int = 0, method: str | None = None, url: str | None = None) -> None: ... + def peek(self, n: int = -1) -> bytes: ... + def read(self, amt: int | None = None) -> bytes: ... + def read1(self, n: int = -1) -> bytes: ... def readinto(self, b: WriteableBuffer) -> int: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] @overload def getheader(self, name: str) -> str | None: ... @overload def getheader(self, name: str, default: _T) -> str | _T: ... def getheaders(self) -> list[tuple[str, str]]: ... - def fileno(self) -> int: ... def isclosed(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -149,23 +148,29 @@ class HTTPConnection: def __init__( self, host: str, - port: int | None = ..., + port: int | None = None, timeout: float | None = ..., - source_address: tuple[str, int] | None = ..., - blocksize: int = ..., + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, ) -> None: ... def request( - self, method: str, url: str, body: _DataType | None = ..., headers: Mapping[str, str] = ..., *, encode_chunked: bool = ... + self, + method: str, + url: str, + body: _DataType | str | None = None, + headers: Mapping[str, str] = ..., + *, + encode_chunked: bool = False, ) -> None: ... def getresponse(self) -> HTTPResponse: ... def set_debuglevel(self, level: int) -> None: ... - def set_tunnel(self, host: str, port: int | None = ..., headers: Mapping[str, str] | None = ...) -> None: ... + def set_tunnel(self, host: str, port: int | None = None, headers: Mapping[str, str] | None = None) -> None: ... def connect(self) -> None: ... def close(self) -> None: ... - def putrequest(self, method: str, url: str, skip_host: bool = ..., skip_accept_encoding: bool = ...) -> None: ... 
+ def putrequest(self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False) -> None: ... def putheader(self, header: str, *argument: str) -> None: ... - def endheaders(self, message_body: _DataType | None = ..., *, encode_chunked: bool = ...) -> None: ... - def send(self, data: _DataType) -> None: ... + def endheaders(self, message_body: _DataType | None = None, *, encode_chunked: bool = False) -> None: ... + def send(self, data: _DataType | str) -> None: ... class HTTPSConnection(HTTPConnection): # Can be `None` if `.connect()` was not called: @@ -173,15 +178,15 @@ class HTTPSConnection(HTTPConnection): def __init__( self, host: str, - port: int | None = ..., - key_file: str | None = ..., - cert_file: str | None = ..., + port: int | None = None, + key_file: str | None = None, + cert_file: str | None = None, timeout: float | None = ..., - source_address: tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = None, *, - context: ssl.SSLContext | None = ..., - check_hostname: bool | None = ..., - blocksize: int = ..., + context: ssl.SSLContext | None = None, + check_hostname: bool | None = None, + blocksize: int = 8192, ) -> None: ... class HTTPException(Exception): ... @@ -198,7 +203,7 @@ class UnknownTransferEncoding(HTTPException): ... class UnimplementedFileMode(HTTPException): ... class IncompleteRead(HTTPException): - def __init__(self, partial: bytes, expected: int | None = ...) -> None: ... + def __init__(self, partial: bytes, expected: int | None = None) -> None: ... partial: bytes expected: int | None diff --git a/mypy/typeshed/stdlib/http/cookiejar.pyi b/mypy/typeshed/stdlib/http/cookiejar.pyi index dc3c0e17d336..7f2c9c6cc8f4 100644 --- a/mypy/typeshed/stdlib/http/cookiejar.pyi +++ b/mypy/typeshed/stdlib/http/cookiejar.pyi @@ -28,14 +28,14 @@ class CookieJar(Iterable[Cookie]): domain_re: ClassVar[Pattern[str]] # undocumented dots_re: ClassVar[Pattern[str]] # undocumented magic_re: ClassVar[Pattern[str]] # undocumented - def __init__(self, policy: CookiePolicy | None = ...) -> None: ... + def __init__(self, policy: CookiePolicy | None = None) -> None: ... def add_cookie_header(self, request: Request) -> None: ... def extract_cookies(self, response: HTTPResponse, request: Request) -> None: ... def set_policy(self, policy: CookiePolicy) -> None: ... def make_cookies(self, response: HTTPResponse, request: Request) -> Sequence[Cookie]: ... def set_cookie(self, cookie: Cookie) -> None: ... def set_cookie_if_ok(self, cookie: Cookie, request: Request) -> None: ... - def clear(self, domain: str | None = ..., path: str | None = ..., name: str | None = ...) -> None: ... + def clear(self, domain: str | None = None, path: str | None = None, name: str | None = None) -> None: ... def clear_session_cookies(self) -> None: ... def clear_expired_cookies(self) -> None: ... # undocumented def __iter__(self) -> Iterator[Cookie]: ... @@ -45,20 +45,22 @@ class FileCookieJar(CookieJar): filename: str delayload: bool if sys.version_info >= (3, 8): - def __init__(self, filename: StrPath | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... + def __init__( + self, filename: StrPath | None = None, delayload: bool = False, policy: CookiePolicy | None = None + ) -> None: ... else: - def __init__(self, filename: str | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... + def __init__(self, filename: str | None = None, delayload: bool = False, policy: CookiePolicy | None = None) -> None: ... 
- def save(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... - def load(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... - def revert(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... + def save(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def load(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... + def revert(self, filename: str | None = None, ignore_discard: bool = False, ignore_expires: bool = False) -> None: ... class MozillaCookieJar(FileCookieJar): if sys.version_info < (3, 10): header: ClassVar[str] # undocumented class LWPCookieJar(FileCookieJar): - def as_lwp_str(self, ignore_discard: bool = ..., ignore_expires: bool = ...) -> str: ... # undocumented + def as_lwp_str(self, ignore_discard: bool = True, ignore_expires: bool = True) -> str: ... # undocumented class CookiePolicy: netscape: bool @@ -85,35 +87,35 @@ class DefaultCookiePolicy(CookiePolicy): if sys.version_info >= (3, 8): def __init__( self, - blocked_domains: Sequence[str] | None = ..., - allowed_domains: Sequence[str] | None = ..., - netscape: bool = ..., - rfc2965: bool = ..., - rfc2109_as_netscape: bool | None = ..., - hide_cookie2: bool = ..., - strict_domain: bool = ..., - strict_rfc2965_unverifiable: bool = ..., - strict_ns_unverifiable: bool = ..., - strict_ns_domain: int = ..., - strict_ns_set_initial_dollar: bool = ..., - strict_ns_set_path: bool = ..., + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, secure_protocols: Sequence[str] = ..., ) -> None: ... else: def __init__( self, - blocked_domains: Sequence[str] | None = ..., - allowed_domains: Sequence[str] | None = ..., - netscape: bool = ..., - rfc2965: bool = ..., - rfc2109_as_netscape: bool | None = ..., - hide_cookie2: bool = ..., - strict_domain: bool = ..., - strict_rfc2965_unverifiable: bool = ..., - strict_ns_unverifiable: bool = ..., - strict_ns_domain: int = ..., - strict_ns_set_initial_dollar: bool = ..., - strict_ns_set_path: bool = ..., + blocked_domains: Sequence[str] | None = None, + allowed_domains: Sequence[str] | None = None, + netscape: bool = True, + rfc2965: bool = False, + rfc2109_as_netscape: bool | None = None, + hide_cookie2: bool = False, + strict_domain: bool = False, + strict_rfc2965_unverifiable: bool = True, + strict_ns_unverifiable: bool = False, + strict_ns_domain: int = 0, + strict_ns_set_initial_dollar: bool = False, + strict_ns_set_path: bool = False, ) -> None: ... def blocked_domains(self) -> tuple[str, ...]: ... @@ -170,7 +172,7 @@ class Cookie: comment: str | None, comment_url: str | None, rest: dict[str, str], - rfc2109: bool = ..., + rfc2109: bool = False, ) -> None: ... def has_nonstandard_attr(self, name: str) -> bool: ... @overload @@ -178,4 +180,4 @@ class Cookie: @overload def get_nonstandard_attr(self, name: str, default: _T) -> str | _T: ... def set_nonstandard_attr(self, name: str, value: str) -> None: ... 
- def is_expired(self, now: int | None = ...) -> bool: ... + def is_expired(self, now: int | None = None) -> bool: ... diff --git a/mypy/typeshed/stdlib/http/cookies.pyi b/mypy/typeshed/stdlib/http/cookies.pyi index e2fe44d305ef..e24ef9cbdd2e 100644 --- a/mypy/typeshed/stdlib/http/cookies.pyi +++ b/mypy/typeshed/stdlib/http/cookies.pyi @@ -31,29 +31,29 @@ class Morsel(dict[str, Any], Generic[_T]): def key(self) -> str: ... def __init__(self) -> None: ... def set(self, key: str, val: str, coded_val: _T) -> None: ... - def setdefault(self, key: str, val: str | None = ...) -> str: ... + def setdefault(self, key: str, val: str | None = None) -> str: ... # The dict update can also get a keywords argument so this is incompatible @overload # type: ignore[override] def update(self, values: Mapping[str, str]) -> None: ... @overload def update(self, values: Iterable[tuple[str, str]]) -> None: ... def isReservedKey(self, K: str) -> bool: ... - def output(self, attrs: list[str] | None = ..., header: str = ...) -> str: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:") -> str: ... __str__ = output - def js_output(self, attrs: list[str] | None = ...) -> str: ... - def OutputString(self, attrs: list[str] | None = ...) -> str: ... + def js_output(self, attrs: list[str] | None = None) -> str: ... + def OutputString(self, attrs: list[str] | None = None) -> str: ... def __eq__(self, morsel: object) -> bool: ... def __setitem__(self, K: str, V: Any) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): - def __init__(self, input: _DataType | None = ...) -> None: ... + def __init__(self, input: _DataType | None = None) -> None: ... def value_decode(self, val: str) -> _T: ... def value_encode(self, val: _T) -> str: ... - def output(self, attrs: list[str] | None = ..., header: str = ..., sep: str = ...) -> str: ... + def output(self, attrs: list[str] | None = None, header: str = "Set-Cookie:", sep: str = "\r\n") -> str: ... __str__ = output - def js_output(self, attrs: list[str] | None = ...) -> str: ... + def js_output(self, attrs: list[str] | None = None) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... diff --git a/mypy/typeshed/stdlib/http/server.pyi b/mypy/typeshed/stdlib/http/server.pyi index e73497bb18bc..c9700f70e791 100644 --- a/mypy/typeshed/stdlib/http/server.pyi +++ b/mypy/typeshed/stdlib/http/server.pyi @@ -1,6 +1,8 @@ +import _socket import email.message import io import socketserver +import sys from _typeshed import StrPath, SupportsRead, SupportsWrite from collections.abc import Mapping, Sequence from typing import Any, AnyStr, BinaryIO, ClassVar @@ -11,12 +13,10 @@ class HTTPServer(socketserver.TCPServer): server_name: str server_port: int -class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): - daemon_threads: bool # undocumented +class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer): ... 
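The http.cookies portion above now spells out the literal defaults ("Set-Cookie:" header, "\r\n" separator) instead of "...". A small sketch of those defaults at runtime; illustrative only, not part of the patch, and the cookie name/value are invented for the example:

from http.cookies import SimpleCookie

c = SimpleCookie()
c["session"] = "abc123"
print(c.output())                             # Set-Cookie: session=abc123
print(c.output(header="Cookie:", sep="; "))   # overriding the spelled-out defaults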
class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): client_address: tuple[str, int] - server: socketserver.BaseServer close_connection: bool requestline: str command: str @@ -33,30 +33,34 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def __init__(self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer) -> None: ... - def handle(self) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... - def send_error(self, code: int, message: str | None = ..., explain: str | None = ...) -> None: ... - def send_response(self, code: int, message: str | None = ...) -> None: ... + def send_error(self, code: int, message: str | None = None, explain: str | None = None) -> None: ... + def send_response(self, code: int, message: str | None = None) -> None: ... def send_header(self, keyword: str, value: str) -> None: ... - def send_response_only(self, code: int, message: str | None = ...) -> None: ... + def send_response_only(self, code: int, message: str | None = None) -> None: ... def end_headers(self) -> None: ... def flush_headers(self) -> None: ... - def log_request(self, code: int | str = ..., size: int | str = ...) -> None: ... + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: ... def log_error(self, format: str, *args: Any) -> None: ... def log_message(self, format: str, *args: Any) -> None: ... def version_string(self) -> str: ... - def date_time_string(self, timestamp: int | None = ...) -> str: ... + def date_time_string(self, timestamp: int | None = None) -> str: ... def log_date_time_string(self) -> str: ... def address_string(self) -> str: ... def parse_request(self) -> bool: ... # undocumented class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): - server_version: str extensions_map: dict[str, str] + if sys.version_info >= (3, 12): + index_pages: ClassVar[tuple[str, ...]] def __init__( - self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer, directory: str | None = ... + self, + request: socketserver._RequestType, + client_address: _socket._RetAddress, + server: socketserver.BaseServer, + *, + directory: str | None = None, ) -> None: ... def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... diff --git a/mypy/typeshed/stdlib/imaplib.pyi b/mypy/typeshed/stdlib/imaplib.pyi index a313b20a999f..1c2112dd37c8 100644 --- a/mypy/typeshed/stdlib/imaplib.pyi +++ b/mypy/typeshed/stdlib/imaplib.pyi @@ -1,15 +1,16 @@ import subprocess import sys import time -from _typeshed import Self +from _typeshed import ReadableBuffer, _BufferWithLen from builtins import list as _list # conflicts with a method named "list" from collections.abc import Callable +from datetime import datetime from re import Pattern from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType -from typing import IO, Any -from typing_extensions import Literal, TypeAlias +from typing import IO, Any, SupportsAbs, SupportsInt +from typing_extensions import Literal, Self, TypeAlias __all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] @@ -40,11 +41,11 @@ class IMAP4: capabilities: tuple[str, ...] 
PROTOCOL_VERSION: str if sys.version_info >= (3, 9): - def __init__(self, host: str = ..., port: int = ..., timeout: float | None = ...) -> None: ... - def open(self, host: str = ..., port: int = ..., timeout: float | None = ...) -> None: ... + def __init__(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... + def open(self, host: str = "", port: int = 143, timeout: float | None = None) -> None: ... else: - def __init__(self, host: str = ..., port: int = ...) -> None: ... - def open(self, host: str = ..., port: int = ...) -> None: ... + def __init__(self, host: str = "", port: int = 143) -> None: ... + def open(self, host: str = "", port: int = 143) -> None: ... def __getattr__(self, attr: str) -> Any: ... host: str @@ -53,12 +54,12 @@ class IMAP4: file: IO[str] | IO[bytes] def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... - def send(self, data: bytes) -> None: ... + def send(self, data: ReadableBuffer) -> None: ... def shutdown(self) -> None: ... def socket(self) -> _socket: ... def recent(self) -> _CommandResults: ... def response(self, code: str) -> _CommandResults: ... - def append(self, mailbox: str, flags: str, date_time: str, message: bytes) -> str: ... + def append(self, mailbox: str, flags: str, date_time: str, message: ReadableBuffer) -> str: ... def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... def capability(self) -> _CommandResults: ... def check(self) -> _CommandResults: ... @@ -68,7 +69,7 @@ class IMAP4: def delete(self, mailbox: str) -> _CommandResults: ... def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... def enable(self, capability: str) -> _CommandResults: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def expunge(self) -> _CommandResults: ... def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... @@ -76,11 +77,11 @@ class IMAP4: def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... def getquota(self, root: str) -> _CommandResults: ... def getquotaroot(self, mailbox: str) -> _CommandResults: ... - def list(self, directory: str = ..., pattern: str = ...) -> tuple[str, _AnyResponseData]: ... + def list(self, directory: str = '""', pattern: str = "*") -> tuple[str, _AnyResponseData]: ... def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... def logout(self) -> tuple[str, _AnyResponseData]: ... - def lsub(self, directory: str = ..., pattern: str = ...) -> _CommandResults: ... + def lsub(self, directory: str = '""', pattern: str = "*") -> _CommandResults: ... def myrights(self, mailbox: str) -> _CommandResults: ... def namespace(self) -> _CommandResults: ... def noop(self) -> tuple[str, _list[bytes]]: ... @@ -88,12 +89,12 @@ class IMAP4: def proxyauth(self, user: str) -> _CommandResults: ... def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... - def select(self, mailbox: str = ..., readonly: bool = ...) -> tuple[str, _list[bytes | None]]: ... + def select(self, mailbox: str = "INBOX", readonly: bool = False) -> tuple[str, _list[bytes | None]]: ... def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... 
def setannotation(self, *args: str) -> _CommandResults: ... def setquota(self, root: str, limits: str) -> _CommandResults: ... def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... - def starttls(self, ssl_context: Any | None = ...) -> tuple[Literal["OK"], _list[None]]: ... + def starttls(self, ssl_context: Any | None = None) -> tuple[Literal["OK"], _list[None]]: ... def status(self, mailbox: str, names: str) -> _CommandResults: ... def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... def subscribe(self, mailbox: str) -> _CommandResults: ... @@ -112,67 +113,51 @@ class IMAP4_SSL(IMAP4): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - ssl_context: SSLContext | None = ..., - timeout: float | None = ..., + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, + timeout: float | None = None, ) -> None: ... else: def __init__( self, - host: str = ..., - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - ssl_context: SSLContext | None = ..., + host: str = "", + port: int = 993, + keyfile: str | None = None, + certfile: str | None = None, + ssl_context: SSLContext | None = None, ) -> None: ... - host: str - port: int - sock: _socket sslobj: SSLSocket file: IO[Any] if sys.version_info >= (3, 9): - def open(self, host: str = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... + def open(self, host: str = "", port: int | None = 993, timeout: float | None = None) -> None: ... else: - def open(self, host: str = ..., port: int | None = ...) -> None: ... + def open(self, host: str = "", port: int | None = 993) -> None: ... - def read(self, size: int) -> bytes: ... - def readline(self) -> bytes: ... - def send(self, data: bytes) -> None: ... - def shutdown(self) -> None: ... - def socket(self) -> _socket: ... def ssl(self) -> SSLSocket: ... class IMAP4_stream(IMAP4): command: str def __init__(self, command: str) -> None: ... - host: str - port: int - sock: _socket file: IO[Any] process: subprocess.Popen[bytes] writefile: IO[Any] readfile: IO[Any] if sys.version_info >= (3, 9): - def open(self, host: str | None = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... + def open(self, host: str | None = None, port: int | None = None, timeout: float | None = None) -> None: ... else: - def open(self, host: str | None = ..., port: int | None = ...) -> None: ... - - def read(self, size: int) -> bytes: ... - def readline(self) -> bytes: ... - def send(self, data: bytes) -> None: ... - def shutdown(self) -> None: ... + def open(self, host: str | None = None, port: int | None = None) -> None: ... class _Authenticator: - mech: Callable[[bytes], bytes] - def __init__(self, mechinst: Callable[[bytes], bytes]) -> None: ... + mech: Callable[[bytes], bytes | bytearray | memoryview | str | None] + def __init__(self, mechinst: Callable[[bytes], bytes | bytearray | memoryview | str | None]) -> None: ... def process(self, data: str) -> str: ... - def encode(self, inp: bytes) -> str: ... - def decode(self, inp: str) -> bytes: ... + def encode(self, inp: bytes | bytearray | memoryview) -> str: ... + def decode(self, inp: str | _BufferWithLen) -> bytes: ... -def Internaldate2tuple(resp: bytes) -> time.struct_time: ... -def Int2AP(num: int) -> str: ... -def ParseFlags(resp: bytes) -> tuple[bytes, ...]: ... 
-def Time2Internaldate(date_time: float | time.struct_time | str) -> str: ... +def Internaldate2tuple(resp: ReadableBuffer) -> time.struct_time | None: ... +def Int2AP(num: SupportsAbs[SupportsInt]) -> bytes: ... +def ParseFlags(resp: ReadableBuffer) -> tuple[bytes, ...]: ... +def Time2Internaldate(date_time: float | time.struct_time | time._TimeTuple | datetime | str) -> str: ... diff --git a/mypy/typeshed/stdlib/imghdr.pyi b/mypy/typeshed/stdlib/imghdr.pyi index 5f439779a69c..ed3647f20fc5 100644 --- a/mypy/typeshed/stdlib/imghdr.pyi +++ b/mypy/typeshed/stdlib/imghdr.pyi @@ -10,7 +10,7 @@ class _ReadableBinary(Protocol): def seek(self, offset: int) -> Any: ... @overload -def what(file: StrPath | _ReadableBinary, h: None = ...) -> str | None: ... +def what(file: StrPath | _ReadableBinary, h: None = None) -> str | None: ... @overload def what(file: Any, h: bytes) -> str | None: ... diff --git a/mypy/typeshed/stdlib/imp.pyi b/mypy/typeshed/stdlib/imp.pyi index 3054a4465f99..3f2920de9c2b 100644 --- a/mypy/typeshed/stdlib/imp.pyi +++ b/mypy/typeshed/stdlib/imp.pyi @@ -1,9 +1,4 @@ import types -from _typeshed import StrPath -from os import PathLike -from types import TracebackType -from typing import IO, Any, Protocol - from _imp import ( acquire_lock as acquire_lock, create_dynamic as create_dynamic, @@ -15,6 +10,10 @@ from _imp import ( lock_held as lock_held, release_lock as release_lock, ) +from _typeshed import StrPath +from os import PathLike +from types import TracebackType +from typing import IO, Any, Protocol SEARCH_ERROR: int PY_SOURCE: int @@ -30,7 +29,7 @@ IMP_HOOK: int def new_module(name: str) -> types.ModuleType: ... def get_magic() -> bytes: ... def get_tag() -> str: ... -def cache_from_source(path: StrPath, debug_override: bool | None = ...) -> str: ... +def cache_from_source(path: StrPath, debug_override: bool | None = None) -> str: ... def source_from_cache(path: StrPath) -> str: ... def get_suffixes() -> list[tuple[str, str, int]]: ... @@ -49,15 +48,15 @@ class _FileLike(Protocol): def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> Any: ... # PathLike doesn't work for the pathname argument here -def load_source(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... -def load_compiled(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... +def load_source(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... +def load_compiled(name: str, pathname: str, file: _FileLike | None = None) -> types.ModuleType: ... def load_package(name: str, path: StrPath) -> types.ModuleType: ... def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. def find_module( - name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = ... + name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = None ) -> tuple[IO[Any], str, tuple[str, str, int]]: ... def reload(module: types.ModuleType) -> types.ModuleType: ... def init_builtin(name: str) -> types.ModuleType | None: ... -def load_dynamic(name: str, path: str, file: Any = ...) -> types.ModuleType: ... # file argument is ignored +def load_dynamic(name: str, path: str, file: Any = None) -> types.ModuleType: ... 
# file argument is ignored diff --git a/mypy/typeshed/stdlib/importlib/__init__.pyi b/mypy/typeshed/stdlib/importlib/__init__.pyi index 42401a00bdeb..1747b274136e 100644 --- a/mypy/typeshed/stdlib/importlib/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/__init__.pyi @@ -7,14 +7,14 @@ __all__ = ["__import__", "import_module", "invalidate_caches", "reload"] # Signature of `builtins.__import__` should be kept identical to `importlib.__import__` def __import__( name: str, - globals: Mapping[str, object] | None = ..., - locals: Mapping[str, object] | None = ..., + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, fromlist: Sequence[str] = ..., - level: int = ..., + level: int = 0, ) -> ModuleType: ... # `importlib.import_module` return type should be kept the same as `builtins.__import__` -def import_module(name: str, package: str | None = ...) -> ModuleType: ... -def find_loader(name: str, path: str | None = ...) -> Loader | None: ... +def import_module(name: str, package: str | None = None) -> ModuleType: ... +def find_loader(name: str, path: str | None = None) -> Loader | None: ... def invalidate_caches() -> None: ... def reload(module: ModuleType) -> ModuleType: ... diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index b46d42a4199a..3d0c2d38c4e9 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -6,15 +6,14 @@ from _typeshed import ( OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, - StrOrBytesPath, - StrPath, + ReadableBuffer, ) from abc import ABCMeta, abstractmethod from collections.abc import Iterator, Mapping, Sequence from importlib.machinery import ModuleSpec from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from typing import IO, Any, BinaryIO, NoReturn, Protocol, overload, runtime_checkable -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal if sys.version_info >= (3, 11): __all__ = [ @@ -32,72 +31,68 @@ if sys.version_info >= (3, 11): "TraversableResources", ] -_Path: TypeAlias = bytes | str - class Finder(metaclass=ABCMeta): ... +class Loader(metaclass=ABCMeta): + def load_module(self, fullname: str) -> types.ModuleType: ... + def module_repr(self, module: types.ModuleType) -> str: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... + # Not defined on the actual class for backwards-compatibility reasons, + # but expected in new code. + def exec_module(self, module: types.ModuleType) -> None: ... + class ResourceLoader(Loader): @abstractmethod - def get_data(self, path: _Path) -> bytes: ... + def get_data(self, path: str) -> bytes: ... class InspectLoader(Loader): def is_package(self, fullname: str) -> bool: ... def get_code(self, fullname: str) -> types.CodeType | None: ... - def load_module(self, fullname: str) -> types.ModuleType: ... @abstractmethod def get_source(self, fullname: str) -> str | None: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod - def source_to_code(data: bytes | str, path: str = ...) -> types.CodeType: ... + def source_to_code(data: ReadableBuffer | str, path: str = "") -> types.CodeType: ... class ExecutionLoader(InspectLoader): @abstractmethod - def get_filename(self, fullname: str) -> _Path: ... - def get_code(self, fullname: str) -> types.CodeType | None: ... + def get_filename(self, fullname: str) -> str: ... 
class SourceLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): - def path_mtime(self, path: _Path) -> float: ... - def set_data(self, path: _Path, data: bytes) -> None: ... + def path_mtime(self, path: str) -> float: ... + def set_data(self, path: str, data: bytes) -> None: ... def get_source(self, fullname: str) -> str | None: ... - def path_stats(self, path: _Path) -> Mapping[str, Any]: ... + def path_stats(self, path: str) -> Mapping[str, Any]: ... # Please keep in sync with sys._MetaPathFinder class MetaPathFinder(Finder): - def find_module(self, fullname: str, path: Sequence[_Path] | None) -> Loader | None: ... + def find_module(self, fullname: str, path: Sequence[str] | None) -> Loader | None: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec( - self, fullname: str, path: Sequence[_Path] | None, target: types.ModuleType | None = ... + self, fullname: str, path: Sequence[str] | None, target: types.ModuleType | None = ... ) -> ModuleSpec | None: ... class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[_Path]]: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[str]]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... -class Loader(metaclass=ABCMeta): - def load_module(self, fullname: str) -> types.ModuleType: ... - def module_repr(self, module: types.ModuleType) -> str: ... - def create_module(self, spec: ModuleSpec) -> types.ModuleType | None: ... - # Not defined on the actual class for backwards-compatibility reasons, - # but expected in new code. - def exec_module(self, module: types.ModuleType) -> None: ... - class FileLoader(ResourceLoader, ExecutionLoader, metaclass=ABCMeta): name: str - path: _Path - def __init__(self, fullname: str, path: _Path) -> None: ... - def get_data(self, path: _Path) -> bytes: ... - def get_filename(self, name: str | None = ...) -> _Path: ... - def load_module(self, name: str | None = ...) -> types.ModuleType: ... + path: str + def __init__(self, fullname: str, path: str) -> None: ... + def get_data(self, path: str) -> bytes: ... + def get_filename(self, name: str | None = None) -> str: ... + def load_module(self, name: str | None = None) -> types.ModuleType: ... class ResourceReader(metaclass=ABCMeta): @abstractmethod - def open_resource(self, resource: StrOrBytesPath) -> IO[bytes]: ... + def open_resource(self, resource: str) -> IO[bytes]: ... @abstractmethod - def resource_path(self, resource: StrOrBytesPath) -> str: ... + def resource_path(self, resource: str) -> str: ... if sys.version_info >= (3, 10): @abstractmethod def is_resource(self, path: str) -> bool: ... @@ -117,14 +112,18 @@ if sys.version_info >= (3, 9): def is_file(self) -> bool: ... @abstractmethod def iterdir(self) -> Iterator[Traversable]: ... - @abstractmethod - def joinpath(self, child: StrPath) -> Traversable: ... + if sys.version_info >= (3, 11): + @abstractmethod + def joinpath(self, *descendants: str) -> Traversable: ... + else: + @abstractmethod + def joinpath(self, child: str) -> Traversable: ... # The .open method comes from pathlib.pyi and should be kept in sync. 
@overload @abstractmethod def open( self, - mode: OpenTextMode = ..., + mode: OpenTextMode = "r", buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., @@ -134,7 +133,7 @@ if sys.version_info >= (3, 9): @overload @abstractmethod def open( - self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None ) -> FileIO: ... # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload @@ -143,9 +142,9 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeUpdating, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedRandom: ... @overload @abstractmethod @@ -153,9 +152,9 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeWriting, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedWriter: ... @overload @abstractmethod @@ -163,15 +162,15 @@ if sys.version_info >= (3, 9): self, mode: OpenBinaryModeReading, buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @overload @abstractmethod def open( - self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = None, errors: None = None, newline: None = None ) -> BinaryIO: ... # Fallback if mode is not specified @overload @@ -180,18 +179,19 @@ if sys.version_info >= (3, 9): self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> IO[Any]: ... @property + @abstractmethod def name(self) -> str: ... @abstractmethod - def __truediv__(self, child: StrPath) -> Traversable: ... + def __truediv__(self, child: str) -> Traversable: ... @abstractmethod def read_bytes(self) -> bytes: ... @abstractmethod - def read_text(self, encoding: str | None = ...) -> str: ... + def read_text(self, encoding: str | None = None) -> str: ... class TraversableResources(ResourceReader): @abstractmethod def files(self) -> Traversable: ... - def open_resource(self, resource: StrPath) -> BufferedReader: ... # type: ignore[override] + def open_resource(self, resource: str) -> BufferedReader: ... # type: ignore[override] def resource_path(self, resource: Any) -> NoReturn: ... - def is_resource(self, path: StrPath) -> bool: ... + def is_resource(self, path: str) -> bool: ... def contents(self) -> Iterator[str]: ... 
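That closes the importlib.abc changes; the Traversable protocol typed above (joinpath, read_text, the version-gated open overloads) is what importlib.resources.files() hands back. A minimal sketch, illustrative only and not part of the patch; importlib's own __init__.py stands in for a package resource:

from importlib.resources import files  # Python 3.9+

pkg = files("importlib")                # a Traversable
init = pkg.joinpath("__init__.py")      # on 3.11+ joinpath also accepts multiple descendants
print(init.read_text(encoding="utf-8")[:60])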
diff --git a/mypy/typeshed/stdlib/importlib/machinery.pyi b/mypy/typeshed/stdlib/importlib/machinery.pyi index 09abdc6f34fd..5aaefce87e3a 100644 --- a/mypy/typeshed/stdlib/importlib/machinery.pyi +++ b/mypy/typeshed/stdlib/importlib/machinery.pyi @@ -1,6 +1,7 @@ import importlib.abc import sys import types +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable, Sequence from typing import Any @@ -13,9 +14,9 @@ class ModuleSpec: name: str, loader: importlib.abc.Loader | None, *, - origin: str | None = ..., - loader_state: Any = ..., - is_package: bool | None = ..., + origin: str | None = None, + loader_state: Any = None, + is_package: bool | None = None, ) -> None: ... name: str loader: importlib.abc.Loader | None @@ -31,10 +32,10 @@ class ModuleSpec: class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -62,10 +63,10 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... # InspectLoader @classmethod @@ -91,10 +92,10 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): class WindowsRegistryFinder(importlib.abc.MetaPathFinder): @classmethod - def find_module(cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... @classmethod def find_spec( - cls, fullname: str, path: Sequence[importlib.abc._Path] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... class PathFinder: @@ -113,10 +114,10 @@ class PathFinder: @classmethod def find_spec( - cls, fullname: str, path: Sequence[bytes | str] | None = ..., target: types.ModuleType | None = ... + cls, fullname: str, path: Sequence[str] | None = None, target: types.ModuleType | None = None ) -> ModuleSpec | None: ... @classmethod - def find_module(cls, fullname: str, path: Sequence[bytes | str] | None = ...) -> importlib.abc.Loader | None: ... + def find_module(cls, fullname: str, path: Sequence[str] | None = None) -> importlib.abc.Loader | None: ... 
SOURCE_SUFFIXES: list[str] DEBUG_BYTECODE_SUFFIXES: list[str] @@ -135,16 +136,15 @@ class FileFinder(importlib.abc.PathEntryFinder): ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): - def set_data(self, path: importlib.abc._Path, data: bytes, *, _mode: int = ...) -> None: ... + def set_data(self, path: str, data: ReadableBuffer, *, _mode: int = 0o666) -> None: ... class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... class ExtensionFileLoader(importlib.abc.ExecutionLoader): - def __init__(self, name: str, path: importlib.abc._Path) -> None: ... - def get_filename(self, name: str | None = ...) -> importlib.abc._Path: ... + def __init__(self, name: str, path: str) -> None: ... + def get_filename(self, name: str | None = None) -> str: ... def get_source(self, fullname: str) -> None: ... def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... def exec_module(self, module: types.ModuleType) -> None: ... - def is_package(self, fullname: str) -> bool: ... def get_code(self, fullname: str) -> None: ... def __eq__(self, other: object) -> bool: ... diff --git a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi index 99fecb41497d..083453cd3c9a 100644 --- a/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/metadata/__init__.pyi @@ -1,7 +1,7 @@ import abc import pathlib import sys -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import Iterable, Mapping from email.message import Message from importlib.abc import MetaPathFinder @@ -9,6 +9,7 @@ from os import PathLike from pathlib import Path from re import Pattern from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Self __all__ = [ "Distribution", @@ -41,6 +42,9 @@ class _EntryPointBase(NamedTuple): class EntryPoint(_EntryPointBase): pattern: ClassVar[Pattern[str]] + if sys.version_info >= (3, 11): + def __init__(self, name: str, value: str, group: str) -> None: ... + def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> list[str]: ... @@ -83,13 +87,13 @@ if sys.version_info >= (3, 10): class SelectableGroups(dict[str, EntryPoints]): # use as dict is deprecated since 3.10 @classmethod - def load(cls: type[Self], eps: Iterable[EntryPoint]) -> Self: ... + def load(cls, eps: Iterable[EntryPoint]) -> Self: ... @property def groups(self) -> set[str]: ... @property def names(self) -> set[str]: ... @overload - def select(self: Self) -> Self: ... # type: ignore[misc] + def select(self) -> Self: ... # type: ignore[misc] @overload def select( self, @@ -103,7 +107,7 @@ if sys.version_info >= (3, 10): ) -> EntryPoints: ... class PackagePath(pathlib.PurePosixPath): - def read_text(self, encoding: str = ...) -> str: ... + def read_text(self, encoding: str = "utf-8") -> str: ... def read_binary(self) -> bytes: ... def locate(self) -> PathLike[str]: ... # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: @@ -129,7 +133,7 @@ class Distribution: @overload @classmethod def discover( - cls, *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any + cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... @staticmethod def at(path: StrPath) -> PathDistribution: ... 
@@ -182,7 +186,7 @@ def distribution(distribution_name: str) -> Distribution: ... def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload def distributions( - *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any + *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any ) -> Iterable[Distribution]: ... if sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/importlib/resources.pyi b/mypy/typeshed/stdlib/importlib/resources.pyi index 28ca107f4195..ba3d9b087754 100644 --- a/mypy/typeshed/stdlib/importlib/resources.pyi +++ b/mypy/typeshed/stdlib/importlib/resources.pyi @@ -23,9 +23,9 @@ else: Resource: TypeAlias = str | os.PathLike[Any] def open_binary(package: Package, resource: Resource) -> BinaryIO: ... -def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... +def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ... def read_binary(package: Package, resource: Resource) -> bytes: ... -def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... +def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ... def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... diff --git a/mypy/typeshed/stdlib/importlib/util.pyi b/mypy/typeshed/stdlib/importlib/util.pyi index dca4778fd416..f988eb270a26 100644 --- a/mypy/typeshed/stdlib/importlib/util.pyi +++ b/mypy/typeshed/stdlib/importlib/util.pyi @@ -1,7 +1,7 @@ import importlib.abc import importlib.machinery import types -from _typeshed import StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Callable from typing import Any from typing_extensions import ParamSpec @@ -15,18 +15,18 @@ def resolve_name(name: str, package: str | None) -> str: ... MAGIC_NUMBER: bytes -def cache_from_source(path: str, debug_override: bool | None = ..., *, optimization: Any | None = ...) -> str: ... +def cache_from_source(path: str, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ... def source_from_cache(path: str) -> str: ... -def decode_source(source_bytes: bytes) -> str: ... -def find_spec(name: str, package: str | None = ...) -> importlib.machinery.ModuleSpec | None: ... +def decode_source(source_bytes: ReadableBuffer) -> str: ... +def find_spec(name: str, package: str | None = None) -> importlib.machinery.ModuleSpec | None: ... def spec_from_loader( - name: str, loader: importlib.abc.Loader | None, *, origin: str | None = ..., is_package: bool | None = ... + name: str, loader: importlib.abc.Loader | None, *, origin: str | None = None, is_package: bool | None = None ) -> importlib.machinery.ModuleSpec | None: ... def spec_from_file_location( name: str, - location: StrOrBytesPath | None = ..., + location: StrOrBytesPath | None = None, *, - loader: importlib.abc.Loader | None = ..., + loader: importlib.abc.Loader | None = None, submodule_search_locations: list[str] | None = ..., ) -> importlib.machinery.ModuleSpec | None: ... def module_from_spec(spec: importlib.machinery.ModuleSpec) -> types.ModuleType: ... @@ -35,7 +35,6 @@ class LazyLoader(importlib.abc.Loader): def __init__(self, loader: importlib.abc.Loader) -> None: ... 
@classmethod def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... - def create_module(self, spec: importlib.machinery.ModuleSpec) -> types.ModuleType | None: ... def exec_module(self, module: types.ModuleType) -> None: ... -def source_hash(source_bytes: bytes) -> int: ... +def source_hash(source_bytes: ReadableBuffer) -> int: ... diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index 7f9667c6ebed..2525ef4968ec 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -2,7 +2,6 @@ import dis import enum import sys import types -from _typeshed import Self from collections import OrderedDict from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine, Generator, Mapping, Sequence, Set as AbstractSet from types import ( @@ -25,8 +24,8 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing import Any, ClassVar, NamedTuple, Protocol, TypeVar, Union, overload -from typing_extensions import Literal, ParamSpec, TypeAlias, TypeGuard +from typing import Any, ClassVar, NamedTuple, Protocol, TypeVar, overload +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeGuard if sys.version_info >= (3, 11): __all__ = [ @@ -165,10 +164,10 @@ modulesbyfile: dict[str, Any] _GetMembersPredicate: TypeAlias = Callable[[Any], bool] _GetMembersReturn: TypeAlias = list[tuple[str, Any]] -def getmembers(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... +def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... if sys.version_info >= (3, 11): - def getmembers_static(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... def getmodulename(path: str) -> str | None: ... def ismodule(object: object) -> TypeGuard[ModuleType]: ... @@ -264,17 +263,17 @@ def isdatadescriptor(object: object) -> TypeGuard[_SupportsSet[Any, Any] | _Supp # # Retrieving source code # -_SourceObjectType: TypeAlias = Union[ - ModuleType, type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any] -] +_SourceObjectType: TypeAlias = ( + ModuleType | type[Any] | MethodType | FunctionType | TracebackType | FrameType | CodeType | Callable[..., Any] +) def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... -def getabsfile(object: _SourceObjectType, _filename: str | None = ...) -> str: ... +def getabsfile(object: _SourceObjectType, _filename: str | None = None) -> str: ... def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> str | None: ... def getcomments(object: object) -> str | None: ... def getfile(object: _SourceObjectType) -> str: ... -def getmodule(object: object, _filename: str | None = ...) -> ModuleType | None: ... +def getmodule(object: object, _filename: str | None = None) -> ModuleType | None: ... def getsourcefile(object: _SourceObjectType) -> str | None: ... def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... def getsource(object: _SourceObjectType) -> str: ... 
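The inspect.pyi hunk above covers the source-retrieval helpers (findsource, getsource, getmodule, getsourcefile, ...). A small sketch, illustrative only and not part of the patch; the greet function is invented for the example, and getsource needs the code to live in a real file, so run it as a script rather than in a REPL:

import inspect

def greet(name: str) -> str:
    return f"hello {name}"

print(inspect.getsource(greet))            # the function's own source text
print(inspect.getmodule(greet).__name__)   # '__main__' when run as a script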
@@ -290,21 +289,21 @@ if sys.version_info >= (3, 10): def signature( obj: _IntrospectableCallable, *, - follow_wrapped: bool = ..., - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> Signature: ... else: - def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = ...) -> Signature: ... + def signature(obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Signature: ... class _void: ... class _empty: ... class Signature: def __init__( - self, parameters: Sequence[Parameter] | None = ..., *, return_annotation: Any = ..., __validate_parameters__: bool = ... + self, parameters: Sequence[Parameter] | None = None, *, return_annotation: Any = ..., __validate_parameters__: bool = True ) -> None: ... empty = _empty @property @@ -313,23 +312,21 @@ class Signature: def return_annotation(self) -> Any: ... def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... - def replace( - self: Self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ... - ) -> Self: ... + def replace(self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ...) -> Self: ... if sys.version_info >= (3, 10): @classmethod def from_callable( - cls: type[Self], + cls, obj: _IntrospectableCallable, *, - follow_wrapped: bool = ..., - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + follow_wrapped: bool = True, + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> Self: ... else: @classmethod - def from_callable(cls: type[Self], obj: _IntrospectableCallable, *, follow_wrapped: bool = ...) -> Self: ... + def from_callable(cls, obj: _IntrospectableCallable, *, follow_wrapped: bool = True) -> Self: ... def __eq__(self, other: object) -> bool: ... @@ -337,9 +334,9 @@ if sys.version_info >= (3, 10): def get_annotations( obj: Callable[..., object] | type[Any] | ModuleType, *, - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., - eval_str: bool = ..., + globals: Mapping[str, Any] | None = None, + locals: Mapping[str, Any] | None = None, + eval_str: bool = False, ) -> dict[str, Any]: ... # The name is the same as the enum's name in CPython @@ -372,7 +369,7 @@ class Parameter: @property def annotation(self) -> Any: ... def replace( - self: Self, + self, *, name: str | type[_void] = ..., kind: _ParameterKind | type[_void] = ..., @@ -400,8 +397,8 @@ class BoundArguments: # TODO: The actual return type should be list[_ClassTreeItem] but mypy doesn't # seem to be supporting this at the moment: # _ClassTreeItem = list[_ClassTreeItem] | Tuple[type, Tuple[type, ...]] -def getclasstree(classes: list[type], unique: bool = ...) -> list[Any]: ... -def walktree(classes: list[type], children: dict[type[Any], list[type]], parent: type[Any] | None) -> list[Any]: ... +def getclasstree(classes: list[type], unique: bool = False) -> list[Any]: ... +def walktree(classes: list[type], children: Mapping[type[Any], list[type]], parent: type[Any] | None) -> list[Any]: ... class Arguments(NamedTuple): args: list[str] @@ -436,18 +433,18 @@ class ArgInfo(NamedTuple): locals: dict[str, Any] def getargvalues(frame: FrameType) -> ArgInfo: ... 
-def formatannotation(annotation: object, base_module: str | None = ...) -> str: ... +def formatannotation(annotation: object, base_module: str | None = None) -> str: ... def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... if sys.version_info < (3, 11): def formatargspec( args: list[str], - varargs: str | None = ..., - varkw: str | None = ..., - defaults: tuple[Any, ...] | None = ..., + varargs: str | None = None, + varkw: str | None = None, + defaults: tuple[Any, ...] | None = None, kwonlyargs: Sequence[str] | None = ..., - kwonlydefaults: dict[str, Any] | None = ..., - annotations: dict[str, Any] = ..., + kwonlydefaults: Mapping[str, Any] | None = ..., + annotations: Mapping[str, Any] = ..., formatarg: Callable[[str], str] = ..., formatvarargs: Callable[[str], str] = ..., formatvarkw: Callable[[str], str] = ..., @@ -460,7 +457,7 @@ def formatargvalues( args: list[str], varargs: str | None, varkw: str | None, - locals: dict[str, Any] | None, + locals: Mapping[str, Any] | None, formatarg: Callable[[str], str] | None = ..., formatvarargs: Callable[[str], str] | None = ..., formatvarkw: Callable[[str], str] | None = ..., @@ -476,7 +473,7 @@ class ClosureVars(NamedTuple): unbound: AbstractSet[str] def getclosurevars(func: _IntrospectableCallable) -> ClosureVars: ... -def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = ...) -> Any: ... +def unwrap(func: Callable[..., Any], *, stop: Callable[[Callable[..., Any]], Any] | None = None) -> Any: ... # # The interpreter stack @@ -493,14 +490,14 @@ if sys.version_info >= (3, 11): class Traceback(_Traceback): positions: dis.Positions | None def __new__( - cls: type[Self], + cls, filename: str, lineno: int, function: str, code_context: list[str] | None, index: int | None, *, - positions: dis.Positions | None = ..., + positions: dis.Positions | None = None, ) -> Self: ... class _FrameInfo(NamedTuple): @@ -514,7 +511,7 @@ if sys.version_info >= (3, 11): class FrameInfo(_FrameInfo): positions: dis.Positions | None def __new__( - cls: type[Self], + cls, frame: FrameType, filename: str, lineno: int, @@ -522,7 +519,7 @@ if sys.version_info >= (3, 11): code_context: list[str] | None, index: int | None, *, - positions: dis.Positions | None = ..., + positions: dis.Positions | None = None, ) -> Self: ... else: @@ -541,13 +538,13 @@ else: code_context: list[str] | None index: int | None # type: ignore[assignment] -def getframeinfo(frame: FrameType | TracebackType, context: int = ...) -> Traceback: ... -def getouterframes(frame: Any, context: int = ...) -> list[FrameInfo]: ... -def getinnerframes(tb: TracebackType, context: int = ...) -> list[FrameInfo]: ... +def getframeinfo(frame: FrameType | TracebackType, context: int = 1) -> Traceback: ... +def getouterframes(frame: Any, context: int = 1) -> list[FrameInfo]: ... +def getinnerframes(tb: TracebackType, context: int = 1) -> list[FrameInfo]: ... def getlineno(frame: FrameType) -> int: ... def currentframe() -> FrameType | None: ... -def stack(context: int = ...) -> list[FrameInfo]: ... -def trace(context: int = ...) -> list[FrameInfo]: ... +def stack(context: int = 1) -> list[FrameInfo]: ... +def trace(context: int = 1) -> list[FrameInfo]: ... 
# # Fetching attributes statically @@ -585,7 +582,7 @@ _Object: TypeAlias = object class Attribute(NamedTuple): name: str - kind: str + kind: Literal["class method", "static method", "property", "method", "data"] defining_class: type object: _Object diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index f47a9ddf334c..c3e07bacbe5a 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -2,12 +2,12 @@ import abc import builtins import codecs import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import FileDescriptorOrPath, ReadableBuffer, WriteableBuffer from collections.abc import Callable, Iterable, Iterator from os import _Opener from types import TracebackType from typing import IO, Any, BinaryIO, TextIO -from typing_extensions import Literal +from typing_extensions import Literal, Self __all__ = [ "BlockingIOError", @@ -51,7 +51,7 @@ class UnsupportedOperation(OSError, ValueError): ... class IOBase(metaclass=abc.ABCMeta): def __iter__(self) -> Iterator[bytes]: ... def __next__(self) -> bytes: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @@ -61,7 +61,7 @@ class IOBase(metaclass=abc.ABCMeta): def isatty(self) -> bool: ... def readable(self) -> bool: ... read: Callable[..., Any] - def readlines(self, __hint: int = ...) -> list[bytes]: ... + def readlines(self, __hint: int = -1) -> list[bytes]: ... def seek(self, __offset: int, __whence: int = ...) -> int: ... def seekable(self) -> bool: ... def tell(self) -> int: ... @@ -69,7 +69,7 @@ class IOBase(metaclass=abc.ABCMeta): def writable(self) -> bool: ... write: Callable[..., Any] def writelines(self, __lines: Iterable[ReadableBuffer]) -> None: ... - def readline(self, __size: int | None = ...) -> bytes: ... + def readline(self, __size: int | None = -1) -> bytes: ... def __del__(self) -> None: ... @property def closed(self) -> bool: ... @@ -79,7 +79,7 @@ class RawIOBase(IOBase): def readall(self) -> bytes: ... def readinto(self, __buffer: WriteableBuffer) -> int | None: ... def write(self, __b: ReadableBuffer) -> int | None: ... - def read(self, __size: int = ...) -> bytes | None: ... + def read(self, __size: int = -1) -> bytes | None: ... class BufferedIOBase(IOBase): raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. @@ -92,43 +92,40 @@ class BufferedIOBase(IOBase): class FileIO(RawIOBase, BinaryIO): mode: str - name: StrOrBytesPath | int # type: ignore[assignment] + name: FileDescriptorOrPath # type: ignore[assignment] def __init__( - self, file: StrOrBytesPath | int, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... + self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... ) -> None: ... @property def closefd(self) -> bool: ... def write(self, __b: ReadableBuffer) -> int: ... - def read(self, __size: int = ...) -> bytes: ... - def __enter__(self: Self) -> Self: ... + def read(self, __size: int = -1) -> bytes: ... + def __enter__(self) -> Self: ... class BytesIO(BufferedIOBase, BinaryIO): - def __init__(self, initial_bytes: bytes = ...) -> None: ... + def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... # BytesIO does not contain a "name" field. 
This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined # as a read-only property on IO[]. name: Any - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def getvalue(self) -> bytes: ... def getbuffer(self) -> memoryview: ... - def read1(self, __size: int | None = ...) -> bytes: ... + def read1(self, __size: int | None = -1) -> bytes: ... class BufferedReader(BufferedIOBase, BinaryIO): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... - def peek(self, __size: int = ...) -> bytes: ... - def read1(self, __size: int = ...) -> bytes: ... + def peek(self, __size: int = 0) -> bytes: ... class BufferedWriter(BufferedIOBase, BinaryIO): - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def write(self, __buffer: ReadableBuffer) -> int: ... class BufferedRandom(BufferedReader, BufferedWriter): - def __enter__(self: Self) -> Self: ... - def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... - def seek(self, __target: int, __whence: int = ...) -> int: ... - def read1(self, __size: int = ...) -> bytes: ... + def __enter__(self) -> Self: ... + def seek(self, __target: int, __whence: int = 0) -> int: ... # stubtest needs this class BufferedRWPair(BufferedIOBase): def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... @@ -144,9 +141,8 @@ class TextIOBase(IOBase): def write(self, __s: str) -> int: ... def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] def readline(self, __size: int = ...) -> str: ... # type: ignore[override] - def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] def read(self, __size: int | None = ...) -> str: ... - def tell(self) -> int: ... class TextIOWrapper(TextIOBase, TextIO): def __init__( @@ -169,20 +165,20 @@ class TextIOWrapper(TextIOBase, TextIO): def reconfigure( self, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - line_buffering: bool | None = ..., - write_through: bool | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + line_buffering: bool | None = None, + write_through: bool | None = None, ) -> None: ... # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __iter__(self) -> Iterator[str]: ... # type: ignore[override] def __next__(self) -> str: ... # type: ignore[override] def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] - def readline(self, __size: int = ...) -> str: ... # type: ignore[override] - def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] - def seek(self, __cookie: int, __whence: int = ...) -> int: ... + def readline(self, __size: int = -1) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] + def seek(self, __cookie: int, __whence: int = 0) -> int: ... # stubtest needs this class StringIO(TextIOWrapper): def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ... 
@@ -194,7 +190,7 @@ class StringIO(TextIOWrapper): class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... - def decode(self, input: bytes | str, final: bool = ...) -> str: ... + def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ... @property def newlines(self) -> str | tuple[str, ...] | None: ... def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/mypy/typeshed/stdlib/ipaddress.pyi b/mypy/typeshed/stdlib/ipaddress.pyi index d324f52ac25a..9f9662137765 100644 --- a/mypy/typeshed/stdlib/ipaddress.pyi +++ b/mypy/typeshed/stdlib/ipaddress.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Container, Iterable, Iterator from typing import Any, Generic, SupportsInt, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias # Undocumented length constants IPV4LENGTH: Literal[32] @@ -15,8 +14,12 @@ _RawIPAddress: TypeAlias = int | str | bytes | IPv4Address | IPv6Address _RawNetworkPart: TypeAlias = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... -def ip_network(address: _RawIPAddress | _RawNetworkPart, strict: bool = ...) -> IPv4Network | IPv6Network: ... -def ip_interface(address: _RawIPAddress | _RawNetworkPart) -> IPv4Interface | IPv6Interface: ... +def ip_network( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int], strict: bool = True +) -> IPv4Network | IPv6Network: ... +def ip_interface( + address: _RawIPAddress | _RawNetworkPart | tuple[_RawIPAddress] | tuple[_RawIPAddress, int] +) -> IPv4Interface | IPv6Interface: ... class _IPAddressBase: @property @@ -30,20 +33,20 @@ class _IPAddressBase: class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... - def __add__(self: Self, other: int) -> Self: ... + def __add__(self, other: int) -> Self: ... def __int__(self) -> int: ... - def __sub__(self: Self, other: int) -> Self: ... + def __sub__(self, other: int) -> Self: ... def __format__(self, fmt: str) -> str: ... def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self) -> bool: ... + def __lt__(self, other: Self) -> bool: ... if sys.version_info >= (3, 11): - def __ge__(self: Self, other: Self) -> bool: ... - def __gt__(self: Self, other: Self) -> bool: ... - def __le__(self: Self, other: Self) -> bool: ... + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... @property def is_global(self) -> bool: ... @@ -72,20 +75,20 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... def __eq__(self, other: object) -> bool: ... - def __lt__(self: Self, other: Self) -> bool: ... + def __lt__(self, other: Self) -> bool: ... 
if sys.version_info >= (3, 11): - def __ge__(self: Self, other: Self) -> bool: ... - def __gt__(self: Self, other: Self) -> bool: ... - def __le__(self: Self, other: Self) -> bool: ... + def __ge__(self, other: Self) -> bool: ... + def __gt__(self, other: Self) -> bool: ... + def __le__(self, other: Self) -> bool: ... else: - def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... - def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Self, NotImplemented: Any = ...) -> bool: ... - def address_exclude(self: Self, other: Self) -> Iterator[Self]: ... + def address_exclude(self, other: Self) -> Iterator[Self]: ... @property def broadcast_address(self) -> _A: ... - def compare_networks(self: Self, other: Self) -> int: ... + def compare_networks(self, other: Self) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... @@ -108,10 +111,10 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... @property def prefixlen(self) -> int: ... - def subnet_of(self: Self, other: Self) -> bool: ... - def supernet_of(self: Self, other: Self) -> bool: ... - def subnets(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Iterator[Self]: ... - def supernet(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Self: ... + def subnet_of(self, other: Self) -> bool: ... + def supernet_of(self, other: Self) -> bool: ... + def subnets(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Iterator[Self]: ... + def supernet(self, prefixlen_diff: int = 1, new_prefix: int | None = None) -> Self: ... @property def with_hostmask(self) -> str: ... @property diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index 7299ee8200db..c7b92c3aebb5 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -1,8 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Callable, Iterable, Iterator from typing import Any, Generic, SupportsComplex, SupportsFloat, SupportsInt, TypeVar, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -32,12 +31,12 @@ class count(Iterator[_N], Generic[_N]): @overload def __new__(cls, *, step: _N) -> count[_N]: ... def __next__(self) -> _N: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... class cycle(Iterator[_T], Generic[_T]): def __init__(self, __iterable: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... class repeat(Iterator[_T], Generic[_T]): @overload @@ -45,25 +44,25 @@ class repeat(Iterator[_T], Generic[_T]): @overload def __init__(self, object: _T, times: int) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __length_hint__(self) -> int: ... 
class accumulate(Iterator[_T], Generic[_T]): if sys.version_info >= (3, 8): @overload - def __init__(self, iterable: Iterable[_T], func: None = ..., *, initial: _T | None = ...) -> None: ... + def __init__(self, iterable: Iterable[_T], func: None = None, *, initial: _T | None = ...) -> None: ... @overload def __init__(self, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> None: ... else: def __init__(self, iterable: Iterable[_T], func: Callable[[_T, _T], _T] | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... @classmethod # We use type[Any] and not type[_S] to not lose the type inference from __iterable def from_iterable(cls: type[Any], __iterable: Iterable[Iterable[_S]]) -> chain[_S]: ... @@ -72,25 +71,25 @@ class chain(Iterator[_T], Generic[_T]): class compress(Iterator[_T], Generic[_T]): def __init__(self, data: Iterable[_T], selectors: Iterable[Any]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class dropwhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class filterfalse(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T] | None, __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]): @overload - def __new__(cls, iterable: Iterable[_T1], key: None = ...) -> groupby[_T1, _T1]: ... + def __new__(cls, iterable: Iterable[_T1], key: None = None) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T, Iterator[_S]]: ... class islice(Iterator[_T], Generic[_T]): @@ -98,20 +97,20 @@ class islice(Iterator[_T], Generic[_T]): def __init__(self, __iterable: Iterable[_T], __stop: int | None) -> None: ... @overload def __init__(self, __iterable: Iterable[_T], __start: int | None, __stop: int | None, __step: int | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class starmap(Iterator[_T], Generic[_T]): def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... class takewhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: _Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T: ... -def tee(__iterable: Iterable[_T], __n: int = ...) -> tuple[Iterator[_T], ...]: ... +def tee(__iterable: Iterable[_T], __n: int = 2) -> tuple[Iterator[_T], ...]: ... 
class zip_longest(Iterator[_T_co], Generic[_T_co]): # one iterable (fillvalue doesn't matter) @@ -190,7 +189,7 @@ class zip_longest(Iterator[_T_co], Generic[_T_co]): *iterables: Iterable[_T], fillvalue: _T, ) -> zip_longest[tuple[_T, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class product(Iterator[_T_co], Generic[_T_co]): @@ -239,12 +238,12 @@ class product(Iterator[_T_co], Generic[_T_co]): def __new__(cls, *iterables: Iterable[_T1], repeat: int) -> product[tuple[_T1, ...]]: ... @overload def __new__(cls, *iterables: Iterable[Any], repeat: int = ...) -> product[tuple[Any, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class permutations(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T, ...]: ... class combinations(Iterator[_T_co], Generic[_T_co]): @@ -258,16 +257,22 @@ class combinations(Iterator[_T_co], Generic[_T_co]): def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... @overload def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T, ...]: ... if sys.version_info >= (3, 10): class pairwise(Iterator[_T_co], Generic[_T_co]): def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> _T_co: ... + +if sys.version_info >= (3, 12): + class batched(Iterator[_T_co], Generic[_T_co]): + def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... + def __iter__(self) -> Self: ... + def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/mypy/typeshed/stdlib/json/__init__.pyi b/mypy/typeshed/stdlib/json/__init__.pyi index 2fd87622e1fe..63e9718ee151 100644 --- a/mypy/typeshed/stdlib/json/__init__.pyi +++ b/mypy/typeshed/stdlib/json/__init__.pyi @@ -1,6 +1,6 @@ -from _typeshed import SupportsRead +from _typeshed import SupportsRead, SupportsWrite from collections.abc import Callable -from typing import IO, Any +from typing import Any from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder @@ -10,52 +10,52 @@ __all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", " def dumps( obj: Any, *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - cls: type[JSONEncoder] | None = ..., - indent: None | int | str = ..., - separators: tuple[str, str] | None = ..., - default: Callable[[Any], Any] | None = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, **kwds: Any, ) -> str: ... 
def dump( obj: Any, - fp: IO[str], + fp: SupportsWrite[str], *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - cls: type[JSONEncoder] | None = ..., - indent: None | int | str = ..., - separators: tuple[str, str] | None = ..., - default: Callable[[Any], Any] | None = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + cls: type[JSONEncoder] | None = None, + indent: None | int | str = None, + separators: tuple[str, str] | None = None, + default: Callable[[Any], Any] | None = None, + sort_keys: bool = False, **kwds: Any, ) -> None: ... def loads( - s: str | bytes, + s: str | bytes | bytearray, *, - cls: type[JSONDecoder] | None = ..., - object_hook: Callable[[dict[Any, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, ) -> Any: ... def load( fp: SupportsRead[str | bytes], *, - cls: type[JSONDecoder] | None = ..., - object_hook: Callable[[dict[Any, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., + cls: type[JSONDecoder] | None = None, + object_hook: Callable[[dict[Any, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = None, **kwds: Any, ) -> Any: ... -def detect_encoding(b: bytes) -> str: ... # undocumented +def detect_encoding(b: bytes | bytearray) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/json/decoder.pyi b/mypy/typeshed/stdlib/json/decoder.pyi index 2060cf17dd05..8debfe6cd65a 100644 --- a/mypy/typeshed/stdlib/json/decoder.pyi +++ b/mypy/typeshed/stdlib/json/decoder.pyi @@ -21,12 +21,12 @@ class JSONDecoder: def __init__( self, *, - object_hook: Callable[[dict[str, Any]], Any] | None = ..., - parse_float: Callable[[str], Any] | None = ..., - parse_int: Callable[[str], Any] | None = ..., - parse_constant: Callable[[str], Any] | None = ..., - strict: bool = ..., - object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = ..., + object_hook: Callable[[dict[str, Any]], Any] | None = None, + parse_float: Callable[[str], Any] | None = None, + parse_int: Callable[[str], Any] | None = None, + parse_constant: Callable[[str], Any] | None = None, + strict: bool = True, + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = None, ) -> None: ... def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented - def raw_decode(self, s: str, idx: int = ...) -> tuple[Any, int]: ... + def raw_decode(self, s: str, idx: int = 0) -> tuple[Any, int]: ... 
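The json stubs above widen several annotations: `dump` now accepts any `SupportsWrite[str]` rather than a full `IO[str]`, `loads` and `detect_encoding` additionally accept `bytearray`, and the boolean/None defaults are spelled out explicitly. A minimal sketch of what that buys at type-checking time; the `StringCollector` class is hypothetical and exists only for illustration:

```python
import json


class StringCollector:
    """Any object with a write(str) method satisfies SupportsWrite[str]."""

    def __init__(self) -> None:
        self.chunks: list[str] = []

    def write(self, s: str) -> int:
        self.chunks.append(s)
        return len(s)


buf = StringCollector()
json.dump({"a": 1}, buf, sort_keys=True)  # fp no longer needs to be a full IO[str]
assert "".join(buf.chunks) == '{"a": 1}'

# loads is now annotated to accept bytearray in addition to str and bytes
assert json.loads(bytearray(b'{"a": 1}')) == {"a": 1}
```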
diff --git a/mypy/typeshed/stdlib/json/encoder.pyi b/mypy/typeshed/stdlib/json/encoder.pyi index 60e82061946b..0c0d366eb7a2 100644 --- a/mypy/typeshed/stdlib/json/encoder.pyi +++ b/mypy/typeshed/stdlib/json/encoder.pyi @@ -20,19 +20,19 @@ class JSONEncoder: check_circular: bool allow_nan: bool sort_keys: bool - indent: int + indent: int | str def __init__( self, *, - skipkeys: bool = ..., - ensure_ascii: bool = ..., - check_circular: bool = ..., - allow_nan: bool = ..., - sort_keys: bool = ..., - indent: int | None = ..., - separators: tuple[str, str] | None = ..., - default: Callable[..., Any] | None = ..., + skipkeys: bool = False, + ensure_ascii: bool = True, + check_circular: bool = True, + allow_nan: bool = True, + sort_keys: bool = False, + indent: int | str | None = None, + separators: tuple[str, str] | None = None, + default: Callable[..., Any] | None = None, ) -> None: ... def default(self, o: Any) -> Any: ... def encode(self, o: Any) -> str: ... - def iterencode(self, o: Any, _one_shot: bool = ...) -> Iterator[str]: ... + def iterencode(self, o: Any, _one_shot: bool = False) -> Iterator[str]: ... diff --git a/mypy/typeshed/stdlib/keyword.pyi b/mypy/typeshed/stdlib/keyword.pyi index c17c58012fd1..46c386048858 100644 --- a/mypy/typeshed/stdlib/keyword.pyi +++ b/mypy/typeshed/stdlib/keyword.pyi @@ -1,5 +1,6 @@ import sys from collections.abc import Sequence +from typing_extensions import Final if sys.version_info >= (3, 9): __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] @@ -8,8 +9,13 @@ else: def iskeyword(s: str) -> bool: ... -kwlist: Sequence[str] +# a list at runtime, but you're not meant to mutate it; +# type it as a sequence +kwlist: Final[Sequence[str]] if sys.version_info >= (3, 9): def issoftkeyword(s: str) -> bool: ... - softkwlist: Sequence[str] + + # a list at runtime, but you're not meant to mutate it; + # type it as a sequence + softkwlist: Final[Sequence[str]] diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi index 45c9aeaa5691..9f6e4d6774ad 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -12,13 +12,13 @@ class Driver: grammar: Grammar logger: Logger convert: _Convert - def __init__(self, grammar: Grammar, convert: _Convert | None = ..., logger: Logger | None = ...) -> None: ... - def parse_tokens(self, tokens: Iterable[Any], debug: bool = ...) -> _NL: ... - def parse_stream_raw(self, stream: IO[str], debug: bool = ...) -> _NL: ... - def parse_stream(self, stream: IO[str], debug: bool = ...) -> _NL: ... - def parse_file(self, filename: StrPath, encoding: str | None = ..., debug: bool = ...) -> _NL: ... - def parse_string(self, text: str, debug: bool = ...) -> _NL: ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... + def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> _NL: ... + def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... + def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... + def parse_string(self, text: str, debug: bool = False) -> _NL: ... def load_grammar( - gt: str = ..., gp: str | None = ..., save: bool = ..., force: bool = ..., logger: Logger | None = ... 
+ gt: str = "Grammar.txt", gp: str | None = None, save: bool = True, force: bool = False, logger: Logger | None = None ) -> Grammar: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi index 4d298ec6972c..bef0a7922683 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,5 +1,5 @@ -from _typeshed import Self, StrPath -from typing_extensions import TypeAlias +from _typeshed import StrPath +from typing_extensions import Self, TypeAlias _Label: TypeAlias = tuple[int, str | None] _DFA: TypeAlias = list[list[tuple[int, int]]] @@ -15,10 +15,9 @@ class Grammar: tokens: dict[int, int] symbol2label: dict[str, int] start: int - def __init__(self) -> None: ... def dump(self, filename: StrPath) -> None: ... def load(self, filename: StrPath) -> None: ... - def copy(self: Self) -> Self: ... + def copy(self) -> Self: ... def report(self) -> None: ... opmap_raw: str diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi index 6a07c4a4ad48..51eb671f4236 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -20,8 +20,8 @@ class Parser: stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None used_names: set[str] - def __init__(self, grammar: Grammar, convert: _Convert | None = ...) -> None: ... - def setup(self, start: int | None = ...) -> None: ... + def __init__(self, grammar: Grammar, convert: _Convert | None = None) -> None: ... + def setup(self, start: int | None = None) -> None: ... def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... def classify(self, type: int, value: str | None, context: _Context) -> int: ... def shift(self, type: int, value: str | None, newstate: int, context: _Context) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi index e3ea07432d70..d346739d4d58 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi @@ -11,7 +11,7 @@ class ParserGenerator: stream: IO[str] generator: Iterator[_TokenInfo] first: dict[str, dict[str, int]] - def __init__(self, filename: StrPath, stream: IO[str] | None = ...) -> None: ... + def __init__(self, filename: StrPath, stream: IO[str] | None = None) -> None: ... def make_grammar(self) -> PgenGrammar: ... def make_first(self, c: PgenGrammar, name: str) -> dict[int, int]: ... def make_label(self, c: PgenGrammar, label: str) -> int: ... @@ -26,14 +26,13 @@ class ParserGenerator: def parse_alt(self) -> tuple[NFAState, NFAState]: ... def parse_item(self) -> tuple[NFAState, NFAState]: ... def parse_atom(self) -> tuple[NFAState, NFAState]: ... - def expect(self, type: int, value: Any | None = ...) -> str: ... + def expect(self, type: int, value: Any | None = None) -> str: ... def gettoken(self) -> None: ... def raise_error(self, msg: str, *args: Any) -> NoReturn: ... class NFAState: arcs: list[tuple[str | None, NFAState]] - def __init__(self) -> None: ... - def addarc(self, next: NFAState, label: str | None = ...) -> None: ... + def addarc(self, next: NFAState, label: str | None = None) -> None: ... class DFAState: nfaset: dict[NFAState, Any] @@ -44,4 +43,4 @@ class DFAState: def unifystate(self, old: DFAState, new: DFAState) -> None: ... def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override] -def generate_grammar(filename: StrPath = ...) -> PgenGrammar: ... 
+def generate_grammar(filename: StrPath = "Grammar.txt") -> PgenGrammar: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index c9ad1e7bb411..2a9c3fbba821 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -87,7 +87,6 @@ class Untokenizer: tokens: list[str] prev_row: int prev_col: int - def __init__(self) -> None: ... def add_whitespace(self, start: _Coord) -> None: ... def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pygram.pyi b/mypy/typeshed/stdlib/lib2to3/pygram.pyi index bf96a55c41b3..00fdbd1a124e 100644 --- a/mypy/typeshed/stdlib/lib2to3/pygram.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,3 +1,4 @@ +import sys from lib2to3.pgen2.grammar import Grammar class Symbols: @@ -110,4 +111,6 @@ class pattern_symbols(Symbols): python_grammar: Grammar python_grammar_no_print_statement: Grammar +if sys.version_info >= (3, 8): + python_grammar_no_print_and_exec_statement: Grammar pattern_grammar: Grammar diff --git a/mypy/typeshed/stdlib/lib2to3/pytree.pyi b/mypy/typeshed/stdlib/lib2to3/pytree.pyi index 4db9ab99ba44..4f756c9768db 100644 --- a/mypy/typeshed/stdlib/lib2to3/pytree.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pytree.pyi @@ -1,8 +1,7 @@ -from _typeshed import Self from collections.abc import Iterator from lib2to3.pgen2.grammar import Grammar from typing import Any -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias _NL: TypeAlias = Node | Leaf _Context: TypeAlias = tuple[str, int, int] @@ -21,8 +20,8 @@ class Base: was_changed: bool was_checked: bool def __eq__(self, other: object) -> bool: ... - def _eq(self: Self, other: Self) -> bool: ... - def clone(self: Self) -> Self: ... + def _eq(self, other: Self) -> bool: ... + def clone(self) -> Self: ... def post_order(self) -> Iterator[_NL]: ... def pre_order(self) -> Iterator[_NL]: ... def replace(self, new: _NL | list[_NL]) -> None: ... @@ -43,9 +42,9 @@ class Node(Base): self, type: int, children: list[_NL], - context: Any | None = ..., - prefix: str | None = ..., - fixers_applied: list[Any] | None = ..., + context: Any | None = None, + prefix: str | None = None, + fixers_applied: list[Any] | None = None, ) -> None: ... def set_child(self, i: int, child: _NL) -> None: ... def insert_child(self, i: int, child: _NL) -> None: ... @@ -58,7 +57,7 @@ class Leaf(Base): value: str fixers_applied: list[Any] def __init__( - self, type: int, value: str, context: _Context | None = ..., prefix: str | None = ..., fixers_applied: list[Any] = ... + self, type: int, value: str, context: _Context | None = None, prefix: str | None = None, fixers_applied: list[Any] = ... ) -> None: ... def __unicode__(self) -> str: ... @@ -69,23 +68,23 @@ class BasePattern: content: str | None name: str | None def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns - def match(self, node: _NL, results: _Results | None = ...) -> bool: ... - def match_seq(self, nodes: list[_NL], results: _Results | None = ...) -> bool: ... + def match(self, node: _NL, results: _Results | None = None) -> bool: ... + def match_seq(self, nodes: list[_NL], results: _Results | None = None) -> bool: ... def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... 
class LeafPattern(BasePattern): - def __init__(self, type: int | None = ..., content: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class NodePattern(BasePattern): wildcards: bool - def __init__(self, type: int | None = ..., content: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... class WildcardPattern(BasePattern): min: int max: int - def __init__(self, content: str | None = ..., min: int = ..., max: int = ..., name: str | None = ...) -> None: ... + def __init__(self, content: str | None = None, min: int = 0, max: int = 0x7FFFFFFF, name: str | None = None) -> None: ... class NegatedPattern(BasePattern): - def __init__(self, content: str | None = ...) -> None: ... + def __init__(self, content: str | None = None) -> None: ... def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... diff --git a/mypy/typeshed/stdlib/lib2to3/refactor.pyi b/mypy/typeshed/stdlib/lib2to3/refactor.pyi index 3aaea0e519d9..f1d89679aee7 100644 --- a/mypy/typeshed/stdlib/lib2to3/refactor.pyi +++ b/mypy/typeshed/stdlib/lib2to3/refactor.pyi @@ -8,7 +8,7 @@ from .pgen2.grammar import Grammar _Driver: TypeAlias = Any # really lib2to3.driver.Driver _BottomMatcher: TypeAlias = Any # really lib2to3.btm_matcher.BottomMatcher -def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = ...) -> list[str]: ... +def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... def get_fixers_from_package(pkg_name: str) -> list[str]: ... class FixerError(Exception): ... @@ -33,25 +33,25 @@ class RefactoringTool: bmi_pre_order: list[Any] bmi_post_order: list[Any] def __init__( - self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = ..., explicit: Container[str] | None = ... + self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None ) -> None: ... def get_fixers(self) -> tuple[list[Any], list[Any]]: ... def log_error(self, msg: str, *args: Any, **kwds: Any) -> NoReturn: ... def log_message(self, msg: str, *args: Any) -> None: ... def log_debug(self, msg: str, *args: Any) -> None: ... def print_output(self, old_text: str, new_text: str, filename: str, equal): ... - def refactor(self, items: Iterable[str], write: bool = ..., doctests_only: bool = ...) -> None: ... - def refactor_dir(self, dir_name: str, write: bool = ..., doctests_only: bool = ...) -> None: ... + def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... def _read_python_source(self, filename: str) -> tuple[str, str]: ... - def refactor_file(self, filename: str, write: bool = ..., doctests_only: bool = ...) -> None: ... + def refactor_file(self, filename: str, write: bool = False, doctests_only: bool = False) -> None: ... def refactor_string(self, data: str, name: str): ... - def refactor_stdin(self, doctests_only: bool = ...) -> None: ... + def refactor_stdin(self, doctests_only: bool = False) -> None: ... def refactor_tree(self, tree, name: str) -> bool: ... def traverse_by(self, fixers, traversal) -> None: ... def processed_file( - self, new_text: str, filename: str, old_text: str | None = ..., write: bool = ..., encoding: str | None = ... 
+ self, new_text: str, filename: str, old_text: str | None = None, write: bool = False, encoding: str | None = None ) -> None: ... - def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = ...) -> None: ... + def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = None) -> None: ... PS1: ClassVar[str] PS2: ClassVar[str] def refactor_docstring(self, input: str, filename: str) -> str: ... @@ -68,4 +68,6 @@ class MultiprocessingUnsupported(Exception): ... class MultiprocessRefactoringTool(RefactoringTool): queue: Any | None output_lock: Any | None - def refactor(self, items: Iterable[str], write: bool = ..., doctests_only: bool = ..., num_processes: int = ...) -> None: ... + def refactor( + self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1 + ) -> None: ... diff --git a/mypy/typeshed/stdlib/linecache.pyi b/mypy/typeshed/stdlib/linecache.pyi index df54fd80aea7..8e317dd38990 100644 --- a/mypy/typeshed/stdlib/linecache.pyi +++ b/mypy/typeshed/stdlib/linecache.pyi @@ -15,9 +15,9 @@ class _SourceLoader(Protocol): cache: dict[str, _SourceLoader | _ModuleMetadata] # undocumented -def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = ...) -> str: ... +def getline(filename: str, lineno: int, module_globals: _ModuleGlobals | None = None) -> str: ... def clearcache() -> None: ... -def getlines(filename: str, module_globals: _ModuleGlobals | None = ...) -> list[str]: ... -def checkcache(filename: str | None = ...) -> None: ... -def updatecache(filename: str, module_globals: _ModuleGlobals | None = ...) -> list[str]: ... +def getlines(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... +def checkcache(filename: str | None = None) -> None: ... +def updatecache(filename: str, module_globals: _ModuleGlobals | None = None) -> list[str]: ... def lazycache(filename: str, module_globals: _ModuleGlobals) -> bool: ... diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index 9a3ea65d1b8b..0b0dd9456e52 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -111,19 +111,19 @@ CHAR_MAX: int class Error(Exception): ... -def setlocale(category: int, locale: _str | Iterable[_str | None] | None = ...) -> _str: ... +def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... def localeconv() -> Mapping[_str, int | _str | list[int]]: ... def nl_langinfo(__key: int) -> _str: ... def getdefaultlocale(envvars: tuple[_str, ...] = ...) -> tuple[_str | None, _str | None]: ... def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... -def getpreferredencoding(do_setlocale: bool = ...) -> _str: ... +def getpreferredencoding(do_setlocale: bool = True) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... def strcoll(__os1: _str, __os2: _str) -> int: ... def strxfrm(__string: _str) -> _str: ... -def format(percent: _str, value: float | Decimal, grouping: bool = ..., monetary: bool = ..., *additional: Any) -> _str: ... -def format_string(f: _str, val: Any, grouping: bool = ..., monetary: bool = ...) -> _str: ... -def currency(val: float | Decimal, symbol: bool = ..., grouping: bool = ..., international: bool = ...) -> _str: ... +def format(percent: _str, value: float | Decimal, grouping: bool = False, monetary: bool = False, *additional: Any) -> _str: ... 
+def format_string(f: _str, val: Any, grouping: bool = False, monetary: bool = False) -> _str: ... +def currency(val: float | Decimal, symbol: bool = True, grouping: bool = False, international: bool = False) -> _str: ... def delocalize(string: _str) -> _str: ... def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... def atoi(string: _str) -> int: ... diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 0d3e80ddcf00..c74afa45ded1 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -1,14 +1,14 @@ import sys import threading -from _typeshed import Self, StrPath, SupportsWrite +from _typeshed import StrPath, SupportsWrite from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from io import TextIOWrapper from re import Pattern from string import Template from time import struct_time from types import FrameType, TracebackType -from typing import Any, ClassVar, Generic, TextIO, TypeVar, Union, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, ClassVar, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 11): from types import GenericAlias @@ -61,10 +61,10 @@ __all__ = [ if sys.version_info >= (3, 11): __all__ += ["getLevelNamesMapping"] -_SysExcInfoType: TypeAlias = Union[tuple[type[BaseException], BaseException, TracebackType | None], tuple[None, None, None]] +_SysExcInfoType: TypeAlias = tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] _ExcInfoType: TypeAlias = None | bool | _SysExcInfoType | BaseException _ArgsType: TypeAlias = tuple[object, ...] | Mapping[str, object] -_FilterType: TypeAlias = Filter | Callable[[LogRecord], int] +_FilterType: TypeAlias = Filter | Callable[[LogRecord], bool] _Level: TypeAlias = int | str _FormatStyle: TypeAlias = Literal["%", "{", "$"] @@ -81,7 +81,6 @@ _nameToLevel: dict[str, int] class Filterer: filters: list[Filter] - def __init__(self) -> None: ... def addFilter(self, filter: _FilterType) -> None: ... def removeFilter(self, filter: _FilterType) -> None: ... def filter(self, record: LogRecord) -> bool: ... @@ -107,11 +106,11 @@ class Logger(Filterer): disabled: bool # undocumented root: ClassVar[RootLogger] # undocumented manager: Manager # undocumented - def __init__(self, name: str, level: _Level = ...) -> None: ... + def __init__(self, name: str, level: _Level = 0) -> None: ... def setLevel(self, level: _Level) -> None: ... def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... - def getChild(self: Self, suffix: str) -> Self: ... # see python/typing#980 + def getChild(self, suffix: str) -> Self: ... # see python/typing#980 if sys.version_info >= (3, 8): def debug( self, @@ -162,7 +161,7 @@ class Logger(Filterer): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -191,10 +190,10 @@ class Logger(Filterer): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., - stacklevel: int = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, + stacklevel: int = 1, ) -> None: ... 
# undocumented else: def debug( @@ -258,7 +257,7 @@ class Logger(Filterer): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., extra: Mapping[str, object] | None = ..., ) -> None: ... @@ -267,18 +266,17 @@ class Logger(Filterer): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, ) -> None: ... # undocumented fatal = critical - def filter(self, record: LogRecord) -> bool: ... def addHandler(self, hdlr: Handler) -> None: ... def removeHandler(self, hdlr: Handler) -> None: ... if sys.version_info >= (3, 8): - def findCaller(self, stack_info: bool = ..., stacklevel: int = ...) -> tuple[str, int, str, str | None]: ... + def findCaller(self, stack_info: bool = False, stacklevel: int = 1) -> tuple[str, int, str, str | None]: ... else: - def findCaller(self, stack_info: bool = ...) -> tuple[str, int, str, str | None]: ... + def findCaller(self, stack_info: bool = False) -> tuple[str, int, str, str | None]: ... def handle(self, record: LogRecord) -> None: ... def makeRecord( @@ -290,9 +288,9 @@ class Logger(Filterer): msg: object, args: _ArgsType, exc_info: _SysExcInfoType | None, - func: str | None = ..., - extra: Mapping[str, object] | None = ..., - sinfo: str | None = ..., + func: str | None = None, + extra: Mapping[str, object] | None = None, + sinfo: str | None = None, ) -> LogRecord: ... def hasHandlers(self) -> bool: ... def callHandlers(self, record: LogRecord) -> None: ... # undocumented @@ -311,7 +309,7 @@ class Handler(Filterer): formatter: Formatter | None # undocumented lock: threading.Lock | None # undocumented name: str | None # undocumented - def __init__(self, level: _Level = ...) -> None: ... + def __init__(self, level: _Level = 0) -> None: ... def get_name(self) -> str: ... # undocumented def set_name(self, name: str) -> None: ... # undocumented def createLock(self) -> None: ... @@ -319,7 +317,6 @@ class Handler(Filterer): def release(self) -> None: ... def setLevel(self, level: _Level) -> None: ... def setFormatter(self, fmt: Formatter | None) -> None: ... - def filter(self, record: LogRecord) -> bool: ... def flush(self) -> None: ... def close(self) -> None: ... def handle(self, record: LogRecord) -> bool: ... @@ -341,22 +338,22 @@ class Formatter: if sys.version_info >= (3, 10): def __init__( self, - fmt: str | None = ..., - datefmt: str | None = ..., - style: _FormatStyle = ..., - validate: bool = ..., + fmt: str | None = None, + datefmt: str | None = None, + style: _FormatStyle = "%", + validate: bool = True, *, - defaults: Mapping[str, Any] | None = ..., + defaults: Mapping[str, Any] | None = None, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( - self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ..., validate: bool = ... + self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%", validate: bool = True ) -> None: ... else: - def __init__(self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ...) -> None: ... + def __init__(self, fmt: str | None = None, datefmt: str | None = None, style: _FormatStyle = "%") -> None: ... def format(self, record: LogRecord) -> str: ... - def formatTime(self, record: LogRecord, datefmt: str | None = ...) -> str: ... 
+ def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: ... def formatException(self, ei: _SysExcInfoType) -> str: ... def formatMessage(self, record: LogRecord) -> str: ... # undocumented def formatStack(self, stack_info: str) -> str: ... @@ -364,7 +361,7 @@ class Formatter: class BufferingFormatter: linefmt: Formatter - def __init__(self, linefmt: Formatter | None = ...) -> None: ... + def __init__(self, linefmt: Formatter | None = None) -> None: ... def formatHeader(self, records: Sequence[LogRecord]) -> str: ... def formatFooter(self, records: Sequence[LogRecord]) -> str: ... def format(self, records: Sequence[LogRecord]) -> str: ... @@ -372,7 +369,7 @@ class BufferingFormatter: class Filter: name: str # undocumented nlen: int # undocumented - def __init__(self, name: str = ...) -> None: ... + def __init__(self, name: str = "") -> None: ... def filter(self, record: LogRecord) -> bool: ... class LogRecord: @@ -410,8 +407,8 @@ class LogRecord: msg: object, args: _ArgsType | None, exc_info: _SysExcInfoType | None, - func: str | None = ..., - sinfo: str | None = ..., + func: str | None = None, + sinfo: str | None = None, ) -> None: ... def getMessage(self) -> str: ... # Allows setting contextual information on LogRecord objects as per the docs, see #7833 @@ -424,7 +421,7 @@ class LoggerAdapter(Generic[_L]): manager: Manager # undocumented if sys.version_info >= (3, 10): extra: Mapping[str, object] | None - def __init__(self, logger: _L, extra: Mapping[str, object] | None = ...) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object] | None = None) -> None: ... else: extra: Mapping[str, object] def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... @@ -485,7 +482,7 @@ class LoggerAdapter(Generic[_L]): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -562,7 +559,7 @@ class LoggerAdapter(Generic[_L]): self, msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., extra: Mapping[str, object] | None = ..., **kwargs: object, @@ -596,16 +593,16 @@ class LoggerAdapter(Generic[_L]): level: int, msg: object, args: _ArgsType, - exc_info: _ExcInfoType | None = ..., - extra: Mapping[str, object] | None = ..., - stack_info: bool = ..., + exc_info: _ExcInfoType | None = None, + extra: Mapping[str, object] | None = None, + stack_info: bool = False, ) -> None: ... # undocumented @property def name(self) -> str: ... # undocumented if sys.version_info >= (3, 11): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -def getLogger(name: str | None = ...) -> Logger: ... +def getLogger(name: str | None = None) -> Logger: ... def getLoggerClass() -> type[Logger]: ... def getLogRecordFactory() -> Callable[..., LogRecord]: ... @@ -661,7 +658,7 @@ if sys.version_info >= (3, 8): def exception( msg: object, *args: object, - exc_info: _ExcInfoType = ..., + exc_info: _ExcInfoType = True, stack_info: bool = ..., stacklevel: int = ..., extra: Mapping[str, object] | None = ..., @@ -696,7 +693,11 @@ else: msg: object, *args: object, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Mapping[str, object] | None = ... ) -> None: ... def exception( - msg: object, *args: object, exc_info: _ExcInfoType = ..., stack_info: bool = ..., extra: Mapping[str, object] | None = ... 
+ msg: object, + *args: object, + exc_info: _ExcInfoType = True, + stack_info: bool = ..., + extra: Mapping[str, object] | None = ..., ) -> None: ... def log( level: int, @@ -709,7 +710,7 @@ else: fatal = critical -def disable(level: int = ...) -> None: ... +def disable(level: int = 50) -> None: ... def addLevelName(level: int, levelName: str) -> None: ... def getLevelName(level: _Level) -> Any: ... @@ -774,7 +775,7 @@ class StreamHandler(Handler, Generic[_StreamT]): stream: _StreamT # undocumented terminator: str @overload - def __init__(self: StreamHandler[TextIO], stream: None = ...) -> None: ... + def __init__(self: StreamHandler[TextIO], stream: None = None) -> None: ... @overload def __init__(self: StreamHandler[_StreamT], stream: _StreamT) -> None: ... def setStream(self, stream: _StreamT) -> _StreamT | None: ... @@ -789,10 +790,10 @@ class FileHandler(StreamHandler[TextIOWrapper]): if sys.version_info >= (3, 9): errors: str | None # undocumented def __init__( - self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... def _open(self) -> TextIOWrapper: ... # undocumented @@ -818,7 +819,7 @@ class PercentStyle: # undocumented validation_pattern: Pattern[str] _fmt: str if sys.version_info >= (3, 10): - def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = ...) -> None: ... + def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = None) -> None: ... else: def __init__(self, fmt: str) -> None: ... diff --git a/mypy/typeshed/stdlib/logging/config.pyi b/mypy/typeshed/stdlib/logging/config.pyi index 12e222680d2e..f76f655a6196 100644 --- a/mypy/typeshed/stdlib/logging/config.pyi +++ b/mypy/typeshed/stdlib/logging/config.pyi @@ -49,18 +49,18 @@ def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... if sys.version_info >= (3, 10): def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, - defaults: dict[str, str] | None = ..., - disable_existing_loggers: bool = ..., - encoding: str | None = ..., + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, + encoding: str | None = None, ) -> None: ... else: def fileConfig( fname: StrOrBytesPath | IO[str] | RawConfigParser, - defaults: dict[str, str] | None = ..., - disable_existing_loggers: bool = ..., + defaults: dict[str, str] | None = None, + disable_existing_loggers: bool = True, ) -> None: ... def valid_ident(s: str) -> Literal[True]: ... # undocumented -def listen(port: int = ..., verify: Callable[[bytes], bytes | None] | None = ...) -> Thread: ... +def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... def stopListening() -> None: ... 
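The logging stubs above now spell out the real runtime defaults instead of `...` placeholders (for example `exception(..., exc_info=True)`, `disable(level=50)`, and `listen(port=9030)`). A small illustrative sketch, not part of the diff, checking two of those values against the running interpreter:

```python
import logging
import logging.config

# The stub defaults mirror the runtime constants: CRITICAL is 50 and
# logging.config.listen() defaults to DEFAULT_LOGGING_CONFIG_PORT (9030).
assert logging.CRITICAL == 50
assert logging.config.DEFAULT_LOGGING_CONFIG_PORT == 9030

logging.disable()                # same as logging.disable(logging.CRITICAL)
logging.critical("suppressed")   # everything up to CRITICAL is now disabled
logging.disable(logging.NOTSET)  # restore normal behaviour
```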
diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index eec4ed96953a..7e0bfd705895 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -2,7 +2,7 @@ import datetime import http.client import ssl import sys -from _typeshed import StrPath +from _typeshed import ReadableBuffer, StrPath from collections.abc import Callable from logging import FileHandler, Handler, LogRecord from queue import Queue, SimpleQueue @@ -22,10 +22,10 @@ class WatchedFileHandler(FileHandler): ino: int # undocumented if sys.version_info >= (3, 9): def __init__( - self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str = "a", encoding: str | None = None, delay: bool = False) -> None: ... def _statstream(self) -> None: ... # undocumented def reopenIfNeeded(self) -> None: ... @@ -35,10 +35,10 @@ class BaseRotatingHandler(FileHandler): rotator: Callable[[str, str], None] | None if sys.version_info >= (3, 9): def __init__( - self, filename: StrPath, mode: str, encoding: str | None = ..., delay: bool = ..., errors: str | None = ... + self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False, errors: str | None = None ) -> None: ... else: - def __init__(self, filename: StrPath, mode: str, encoding: str | None = ..., delay: bool = ...) -> None: ... + def __init__(self, filename: StrPath, mode: str, encoding: str | None = None, delay: bool = False) -> None: ... def rotation_filename(self, default_name: str) -> str: ... def rotate(self, source: str, dest: str) -> None: ... @@ -50,22 +50,22 @@ class RotatingFileHandler(BaseRotatingHandler): def __init__( self, filename: StrPath, - mode: str = ..., - maxBytes: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - errors: str | None = ..., + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + errors: str | None = None, ) -> None: ... else: def __init__( self, filename: StrPath, - mode: str = ..., - maxBytes: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., + mode: str = "a", + maxBytes: int = 0, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, ) -> None: ... def doRollover(self) -> None: ... @@ -85,26 +85,26 @@ class TimedRotatingFileHandler(BaseRotatingHandler): def __init__( self, filename: StrPath, - when: str = ..., - interval: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - utc: bool = ..., - atTime: datetime.time | None = ..., - errors: str | None = ..., + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, + errors: str | None = None, ) -> None: ... 
else: def __init__( self, filename: StrPath, - when: str = ..., - interval: int = ..., - backupCount: int = ..., - encoding: str | None = ..., - delay: bool = ..., - utc: bool = ..., - atTime: datetime.time | None = ..., + when: str = "h", + interval: int = 1, + backupCount: int = 0, + encoding: str | None = None, + delay: bool = False, + utc: bool = False, + atTime: datetime.time | None = None, ) -> None: ... def doRollover(self) -> None: ... @@ -123,9 +123,9 @@ class SocketHandler(Handler): retryFactor: float # undocumented retryMax: float # undocumented def __init__(self, host: str, port: int | None) -> None: ... - def makeSocket(self, timeout: float = ...) -> socket: ... # timeout is undocumented + def makeSocket(self, timeout: float = 1) -> socket: ... # timeout is undocumented def makePickle(self, record: LogRecord) -> bytes: ... - def send(self, s: bytes) -> None: ... + def send(self, s: ReadableBuffer) -> None: ... def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): @@ -177,7 +177,7 @@ class SysLogHandler(Handler): priority_names: ClassVar[dict[str, int]] # undocumented facility_names: ClassVar[dict[str, int]] # undocumented priority_map: ClassVar[dict[str, str]] # undocumented - def __init__(self, address: tuple[str, int] | str = ..., facility: int = ..., socktype: SocketKind | None = ...) -> None: ... + def __init__(self, address: tuple[str, int] | str = ..., facility: int = 1, socktype: SocketKind | None = None) -> None: ... if sys.version_info >= (3, 11): def createSocket(self) -> None: ... @@ -185,7 +185,7 @@ class SysLogHandler(Handler): def mapPriority(self, levelName: str) -> str: ... class NTEventLogHandler(Handler): - def __init__(self, appname: str, dllname: str | None = ..., logtype: str = ...) -> None: ... + def __init__(self, appname: str, dllname: str | None = None, logtype: str = "Application") -> None: ... def getEventCategory(self, record: LogRecord) -> int: ... # TODO correct return value? def getEventType(self, record: LogRecord) -> int: ... @@ -208,9 +208,9 @@ class SMTPHandler(Handler): fromaddr: str, toaddrs: str | list[str], subject: str, - credentials: tuple[str, str] | None = ..., - secure: tuple[()] | tuple[str] | tuple[str, str] | None = ..., - timeout: float = ..., + credentials: tuple[str, str] | None = None, + secure: tuple[()] | tuple[str] | tuple[str, str] | None = None, + timeout: float = 5.0, ) -> None: ... def getSubject(self, record: LogRecord) -> str: ... @@ -224,7 +224,7 @@ class MemoryHandler(BufferingHandler): flushLevel: int # undocumented target: Handler | None # undocumented flushOnClose: bool # undocumented - def __init__(self, capacity: int, flushLevel: int = ..., target: Handler | None = ..., flushOnClose: bool = ...) -> None: ... + def __init__(self, capacity: int, flushLevel: int = 40, target: Handler | None = None, flushOnClose: bool = True) -> None: ... def setTarget(self, target: Handler | None) -> None: ... class HTTPHandler(Handler): @@ -238,10 +238,10 @@ class HTTPHandler(Handler): self, host: str, url: str, - method: str = ..., - secure: bool = ..., - credentials: tuple[str, str] | None = ..., - context: ssl.SSLContext | None = ..., + method: str = "GET", + secure: bool = False, + credentials: tuple[str, str] | None = None, + context: ssl.SSLContext | None = None, ) -> None: ... def mapLogRecord(self, record: LogRecord) -> dict[str, Any]: ... if sys.version_info >= (3, 9): @@ -257,7 +257,7 @@ class QueueListener: handlers: tuple[Handler, ...] 
# undocumented respect_handler_level: bool # undocumented queue: SimpleQueue[Any] | Queue[Any] # undocumented - def __init__(self, queue: SimpleQueue[Any] | Queue[Any], *handlers: Handler, respect_handler_level: bool = ...) -> None: ... + def __init__(self, queue: SimpleQueue[Any] | Queue[Any], *handlers: Handler, respect_handler_level: bool = False) -> None: ... def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... def start(self) -> None: ... diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index d4c7977b8d0a..34bd6f3f8db1 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -1,8 +1,8 @@ import io -from _typeshed import ReadableBuffer, Self, StrOrBytesPath +from _typeshed import ReadableBuffer, StrOrBytesPath from collections.abc import Mapping, Sequence from typing import IO, Any, TextIO, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final __all__ = [ "CHECK_NONE", @@ -83,7 +83,7 @@ PRESET_EXTREME: int # v big number @final class LZMADecompressor: def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ... @property def check(self) -> int: ... @property @@ -99,7 +99,7 @@ class LZMACompressor: def __init__( self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... ) -> None: ... - def compress(self, __data: bytes) -> bytes: ... + def compress(self, __data: ReadableBuffer) -> bytes: ... def flush(self) -> bytes: ... class LZMAError(Exception): ... @@ -107,97 +107,91 @@ class LZMAError(Exception): ... class LZMAFile(io.BufferedIOBase, IO[bytes]): def __init__( self, - filename: _PathOrFile | None = ..., - mode: str = ..., + filename: _PathOrFile | None = None, + mode: str = "r", *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... - def close(self) -> None: ... - @property - def closed(self) -> bool: ... - def fileno(self) -> int: ... - def seekable(self) -> bool: ... - def readable(self) -> bool: ... - def writable(self) -> bool: ... - def peek(self, size: int = ...) -> bytes: ... - def read(self, size: int | None = ...) -> bytes: ... - def read1(self, size: int = ...) -> bytes: ... - def readline(self, size: int | None = ...) -> bytes: ... + def __enter__(self) -> Self: ... + def peek(self, size: int = -1) -> bytes: ... + def read(self, size: int | None = -1) -> bytes: ... + def read1(self, size: int = -1) -> bytes: ... + def readline(self, size: int | None = -1) -> bytes: ... def write(self, data: ReadableBuffer) -> int: ... - def seek(self, offset: int, whence: int = ...) -> int: ... - def tell(self) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... 
@overload def open( filename: _PathOrFile, - mode: Literal["r", "rb"] = ..., + mode: Literal["r", "rb"] = "rb", *, - format: int | None = ..., - check: Literal[-1] = ..., - preset: None = ..., - filters: _FilterChain | None = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> LZMAFile: ... @overload def open( filename: _PathOrFile, mode: _OpenBinaryWritingMode, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> LZMAFile: ... @overload def open( filename: StrOrBytesPath, mode: Literal["rt"], *, - format: int | None = ..., - check: Literal[-1] = ..., - preset: None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: Literal[-1] = -1, + preset: None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: StrOrBytesPath, mode: _OpenTextWritingMode, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIO: ... @overload def open( filename: _PathOrFile, mode: str, *, - format: int | None = ..., - check: int = ..., - preset: int | None = ..., - filters: _FilterChain | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + format: int | None = None, + check: int = -1, + preset: int | None = None, + filters: _FilterChain | None = None, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> LZMAFile | TextIO: ... def compress( - data: bytes, format: int = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... + data: ReadableBuffer, format: int = 1, check: int = -1, preset: int | None = None, filters: _FilterChain | None = None +) -> bytes: ... +def decompress( + data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None ) -> bytes: ... -def decompress(data: bytes, format: int = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> bytes: ... def is_check_supported(__check_id: int) -> bool: ... 
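The lzma changes above widen `compress()` and `decompress()` from `bytes` to `ReadableBuffer` and spell out the `LZMAFile`/`open()` defaults. A minimal sketch, not part of the diff, of the runtime behaviour those annotations describe (`example.xz` is just a placeholder filename):

```python
import lzma

# bytes, bytearray and memoryview are all valid inputs now that the stubs
# use ReadableBuffer, matching what the C implementation accepts.
payload = bytearray(b"hello typeshed")
blob = lzma.compress(memoryview(payload))
assert lzma.decompress(blob) == bytes(payload)

# lzma.open() defaults to binary reading ("rb"), returning an LZMAFile.
with lzma.open("example.xz", "wb") as f:
    f.write(payload)                 # write() also takes any readable buffer
with lzma.open("example.xz") as f:
    assert f.read() == bytes(payload)
```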
diff --git a/mypy/typeshed/stdlib/mailbox.pyi b/mypy/typeshed/stdlib/mailbox.pyi index 3169e8cfa689..8053fad88ea5 100644 --- a/mypy/typeshed/stdlib/mailbox.pyi +++ b/mypy/typeshed/stdlib/mailbox.pyi @@ -1,11 +1,12 @@ import email.message +import io import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrPath, SupportsNoArgReadline, SupportsRead from abc import ABCMeta, abstractmethod from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, Protocol, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -32,7 +33,10 @@ __all__ = [ _T = TypeVar("_T") _MessageT = TypeVar("_MessageT", bound=Message) -_MessageData: TypeAlias = email.message.Message | bytes | str | IO[str] | IO[bytes] + +class _SupportsReadAndReadline(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +_MessageData: TypeAlias = email.message.Message | bytes | str | io.StringIO | _SupportsReadAndReadline class _HasIteritems(Protocol): def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... @@ -43,13 +47,12 @@ class _HasItems(Protocol): linesep: bytes class Mailbox(Generic[_MessageT]): - - _path: bytes | str # undocumented + _path: str # undocumented _factory: Callable[[IO[Any]], _MessageT] | None # undocumented @overload - def __init__(self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT], create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], _MessageT], create: bool = True) -> None: ... @overload - def __init__(self, path: StrOrBytesPath, factory: None = ..., create: bool = ...) -> None: ... + def __init__(self, path: StrPath, factory: None = None, create: bool = True) -> None: ... @abstractmethod def add(self, message: _MessageData) -> str: ... @abstractmethod @@ -59,7 +62,7 @@ class Mailbox(Generic[_MessageT]): @abstractmethod def __setitem__(self, key: str, message: _MessageData) -> None: ... @overload - def get(self, key: str, default: None = ...) -> _MessageT | None: ... + def get(self, key: str, default: None = None) -> _MessageT | None: ... @overload def get(self, key: str, default: _T) -> _MessageT | _T: ... def __getitem__(self, key: str) -> _MessageT: ... @@ -85,11 +88,11 @@ class Mailbox(Generic[_MessageT]): def __len__(self) -> int: ... def clear(self) -> None: ... @overload - def pop(self, key: str, default: None = ...) -> _MessageT | None: ... + def pop(self, key: str, default: None = None) -> _MessageT | None: ... @overload - def pop(self, key: str, default: _T = ...) -> _MessageT | _T: ... + def pop(self, key: str, default: _T) -> _MessageT | _T: ... def popitem(self) -> tuple[str, _MessageT]: ... - def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = ...) -> None: ... + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = None) -> None: ... @abstractmethod def flush(self) -> None: ... @abstractmethod @@ -102,10 +105,9 @@ class Mailbox(Generic[_MessageT]): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class Maildir(Mailbox[MaildirMessage]): - colon: str def __init__( - self, dirname: StrOrBytesPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... 
+ self, dirname: StrPath, factory: Callable[[IO[Any]], MaildirMessage] | None = None, create: bool = True ) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... @@ -141,24 +143,18 @@ class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): class _mboxMMDF(_singlefileMailbox[_MessageT]): def get_message(self, key: str) -> _MessageT: ... - def get_file(self, key: str, from_: bool = ...) -> _PartialFile[bytes]: ... - def get_bytes(self, key: str, from_: bool = ...) -> bytes: ... - def get_string(self, key: str, from_: bool = ...) -> str: ... + def get_file(self, key: str, from_: bool = False) -> _PartialFile[bytes]: ... + def get_bytes(self, key: str, from_: bool = False) -> bytes: ... + def get_string(self, key: str, from_: bool = False) -> str: ... class mbox(_mboxMMDF[mboxMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], mboxMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], mboxMessage] | None = None, create: bool = True) -> None: ... class MMDF(_mboxMMDF[MMDFMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MMDFMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MMDFMessage] | None = None, create: bool = True) -> None: ... class MH(Mailbox[MHMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], MHMessage] | None = None, create: bool = True) -> None: ... def add(self, message: _MessageData) -> str: ... def remove(self, key: str) -> None: ... def __setitem__(self, key: str, message: _MessageData) -> None: ... @@ -173,24 +169,22 @@ class MH(Mailbox[MHMessage]): def unlock(self) -> None: ... def close(self) -> None: ... def list_folders(self) -> list[str]: ... - def get_folder(self, folder: StrOrBytesPath) -> MH: ... - def add_folder(self, folder: StrOrBytesPath) -> MH: ... - def remove_folder(self, folder: StrOrBytesPath) -> None: ... + def get_folder(self, folder: StrPath) -> MH: ... + def add_folder(self, folder: StrPath) -> MH: ... + def remove_folder(self, folder: StrPath) -> None: ... def get_sequences(self) -> dict[str, list[int]]: ... def set_sequences(self, sequences: Mapping[str, Sequence[int]]) -> None: ... def pack(self) -> None: ... class Babyl(_singlefileMailbox[BabylMessage]): - def __init__( - self, path: StrOrBytesPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ... - ) -> None: ... + def __init__(self, path: StrPath, factory: Callable[[IO[Any]], BabylMessage] | None = None, create: bool = True) -> None: ... def get_message(self, key: str) -> BabylMessage: ... def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> IO[bytes]: ... def get_labels(self) -> list[str]: ... class Message(email.message.Message): - def __init__(self, message: _MessageData | None = ...) -> None: ... + def __init__(self, message: _MessageData | None = None) -> None: ... class MaildirMessage(Message): def get_subdir(self) -> str: ... @@ -206,7 +200,7 @@ class MaildirMessage(Message): class _mboxMMDFMessage(Message): def get_from(self) -> str: ... - def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = ...) -> None: ... 
+ def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = None) -> None: ... def get_flags(self) -> str: ... def set_flags(self, flags: Iterable[str]) -> None: ... def add_flag(self, flag: str) -> None: ... @@ -232,16 +226,16 @@ class BabylMessage(Message): class MMDFMessage(_mboxMMDFMessage): ... class _ProxyFile(Generic[AnyStr]): - def __init__(self, f: IO[AnyStr], pos: int | None = ...) -> None: ... - def read(self, size: int | None = ...) -> AnyStr: ... - def read1(self, size: int | None = ...) -> AnyStr: ... - def readline(self, size: int | None = ...) -> AnyStr: ... - def readlines(self, sizehint: int | None = ...) -> list[AnyStr]: ... + def __init__(self, f: IO[AnyStr], pos: int | None = None) -> None: ... + def read(self, size: int | None = None) -> AnyStr: ... + def read1(self, size: int | None = None) -> AnyStr: ... + def readline(self, size: int | None = None) -> AnyStr: ... + def readlines(self, sizehint: int | None = None) -> list[AnyStr]: ... def __iter__(self) -> Iterator[AnyStr]: ... def tell(self) -> int: ... - def seek(self, offset: int, whence: int = ...) -> None: ... + def seek(self, offset: int, whence: int = 0) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... @@ -253,7 +247,7 @@ class _ProxyFile(Generic[AnyStr]): def __class_getitem__(cls, item: Any) -> GenericAlias: ... class _PartialFile(_ProxyFile[AnyStr]): - def __init__(self, f: IO[AnyStr], start: int | None = ..., stop: int | None = ...) -> None: ... + def __init__(self, f: IO[AnyStr], start: int | None = None, stop: int | None = None) -> None: ... class Error(Exception): ... class NoSuchMailboxError(Error): ... diff --git a/mypy/typeshed/stdlib/mailcap.pyi b/mypy/typeshed/stdlib/mailcap.pyi index e1637ad6e7be..5905f5826bf7 100644 --- a/mypy/typeshed/stdlib/mailcap.pyi +++ b/mypy/typeshed/stdlib/mailcap.pyi @@ -6,6 +6,6 @@ _Cap: TypeAlias = dict[str, str | int] __all__ = ["getcaps", "findmatch"] def findmatch( - caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = ..., filename: str = ..., plist: Sequence[str] = ... + caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = ... ) -> tuple[str | None, _Cap | None]: ... def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/mypy/typeshed/stdlib/marshal.pyi b/mypy/typeshed/stdlib/marshal.pyi index b2fde674a647..21f05c908479 100644 --- a/mypy/typeshed/stdlib/marshal.pyi +++ b/mypy/typeshed/stdlib/marshal.pyi @@ -1,8 +1,33 @@ -from typing import IO, Any +import builtins +import types +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite +from typing import Any +from typing_extensions import TypeAlias version: int -def dump(__value: Any, __file: IO[Any], __version: int = ...) -> None: ... -def load(__file: IO[Any]) -> Any: ... -def dumps(__value: Any, __version: int = ...) -> bytes: ... -def loads(__bytes: bytes) -> Any: ... +_Marshallable: TypeAlias = ( + # handled in w_object() in marshal.c + None + | type[StopIteration] + | builtins.ellipsis + | bool + # handled in w_complex_object() in marshal.c + | int + | float + | complex + | bytes + | str + | tuple[_Marshallable, ...] 
+ | list[Any] + | dict[Any, Any] + | set[Any] + | frozenset[_Marshallable] + | types.CodeType + | ReadableBuffer +) + +def dump(__value: _Marshallable, __file: SupportsWrite[bytes], __version: int = 4) -> None: ... +def load(__file: SupportsRead[bytes]) -> Any: ... +def dumps(__value: _Marshallable, __version: int = 4) -> bytes: ... +def loads(__bytes: ReadableBuffer) -> Any: ... diff --git a/mypy/typeshed/stdlib/math.pyi b/mypy/typeshed/stdlib/math.pyi index 58eda98d8977..231964f397db 100644 --- a/mypy/typeshed/stdlib/math.pyi +++ b/mypy/typeshed/stdlib/math.pyi @@ -1,9 +1,11 @@ import sys -from _typeshed import SupportsTrunc from collections.abc import Iterable -from typing import SupportsFloat, overload +from typing import Protocol, SupportsFloat, TypeVar, overload from typing_extensions import SupportsIndex, TypeAlias +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + if sys.version_info >= (3, 8): _SupportsFloatOrIndex: TypeAlias = SupportsFloat | SupportsIndex else: @@ -26,6 +28,12 @@ def atanh(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 11): def cbrt(__x: _SupportsFloatOrIndex) -> float: ... +class _SupportsCeil(Protocol[_T_co]): + def __ceil__(self) -> _T_co: ... + +@overload +def ceil(__x: _SupportsCeil[_T]) -> _T: ... +@overload def ceil(__x: _SupportsFloatOrIndex) -> int: ... if sys.version_info >= (3, 8): @@ -55,6 +63,12 @@ if sys.version_info >= (3, 8): else: def factorial(__x: int) -> int: ... +class _SupportsFloor(Protocol[_T_co]): + def __floor__(self) -> _T_co: ... + +@overload +def floor(__x: _SupportsFloor[_T]) -> _T: ... +@overload def floor(__x: _SupportsFloatOrIndex) -> int: ... def fmod(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... def frexp(__x: _SupportsFloatOrIndex) -> tuple[float, int]: ... @@ -77,8 +91,8 @@ def isclose( a: _SupportsFloatOrIndex, b: _SupportsFloatOrIndex, *, - rel_tol: _SupportsFloatOrIndex = ..., - abs_tol: _SupportsFloatOrIndex = ..., + rel_tol: _SupportsFloatOrIndex = 1e-09, + abs_tol: _SupportsFloatOrIndex = 0.0, ) -> bool: ... def isinf(__x: _SupportsFloatOrIndex) -> bool: ... def isfinite(__x: _SupportsFloatOrIndex) -> bool: ... @@ -102,15 +116,15 @@ if sys.version_info >= (3, 9): def nextafter(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): - def perm(__n: SupportsIndex, __k: SupportsIndex | None = ...) -> int: ... + def perm(__n: SupportsIndex, __k: SupportsIndex | None = None) -> int: ... def pow(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): @overload - def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = ...) -> int: ... # type: ignore[misc] + def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = 1) -> int: ... # type: ignore[misc] @overload - def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = ...) -> float: ... + def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = 1) -> float: ... def radians(__x: _SupportsFloatOrIndex) -> float: ... def remainder(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... @@ -119,7 +133,12 @@ def sinh(__x: _SupportsFloatOrIndex) -> float: ... def sqrt(__x: _SupportsFloatOrIndex) -> float: ... def tan(__x: _SupportsFloatOrIndex) -> float: ... def tanh(__x: _SupportsFloatOrIndex) -> float: ... -def trunc(__x: SupportsTrunc) -> int: ... 
+ +# Is different from `_typeshed.SupportsTrunc`, which is not generic +class _SupportsTrunc(Protocol[_T_co]): + def __trunc__(self) -> _T_co: ... + +def trunc(__x: _SupportsTrunc[_T]) -> _T: ... if sys.version_info >= (3, 9): def ulp(__x: _SupportsFloatOrIndex) -> float: ... diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi index c2b6ff20281a..fd3908680009 100644 --- a/mypy/typeshed/stdlib/mimetypes.pyi +++ b/mypy/typeshed/stdlib/mimetypes.pyi @@ -20,16 +20,16 @@ __all__ = [ ] if sys.version_info >= (3, 8): - def guess_type(url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... else: - def guess_type(url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(url: str, strict: bool = True) -> tuple[str | None, str | None]: ... -def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ... -def guess_extension(type: str, strict: bool = ...) -> str | None: ... -def init(files: Sequence[str] | None = ...) -> None: ... +def guess_all_extensions(type: str, strict: bool = True) -> list[str]: ... +def guess_extension(type: str, strict: bool = True) -> str | None: ... +def init(files: Sequence[str] | None = None) -> None: ... def read_mime_types(file: str) -> dict[str, str] | None: ... -def add_type(type: str, ext: str, strict: bool = ...) -> None: ... +def add_type(type: str, ext: str, strict: bool = True) -> None: ... inited: bool knownfiles: list[str] @@ -43,15 +43,15 @@ class MimeTypes: encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] - def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = ...) -> None: ... - def guess_extension(self, type: str, strict: bool = ...) -> str | None: ... + def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = True) -> None: ... + def guess_extension(self, type: str, strict: bool = True) -> str | None: ... if sys.version_info >= (3, 8): - def guess_type(self, url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... else: - def guess_type(self, url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_type(self, url: str, strict: bool = True) -> tuple[str | None, str | None]: ... - def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ... - def read(self, filename: str, strict: bool = ...) -> None: ... - def readfp(self, fp: IO[str], strict: bool = ...) -> None: ... + def guess_all_extensions(self, type: str, strict: bool = True) -> list[str]: ... + def read(self, filename: str, strict: bool = True) -> None: ... + def readfp(self, fp: IO[str], strict: bool = True) -> None: ... if sys.platform == "win32": - def read_windows_registry(self, strict: bool = ...) -> None: ... + def read_windows_registry(self, strict: bool = True) -> None: ... 
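The math changes above introduce generic `_SupportsCeil`, `_SupportsFloor` and `_SupportsTrunc` protocols so that `ceil()`, `floor()` and `trunc()` can propagate the return type of the corresponding dunder. A sketch, not part of the diff, of what that enables; `Whole` is a hypothetical class used only for illustration:

```python
import math
from fractions import Fraction

# Fraction implements __ceil__, __floor__ and __trunc__, each returning int,
# so type checkers can now infer `int` for these calls directly.
x = Fraction(7, 2)
assert math.ceil(x) == 4
assert math.floor(x) == 3
assert math.trunc(x) == 3

# The generic protocol also covers __trunc__ implementations whose return
# type is not int; math.trunc() returns whatever __trunc__ produces.
class Whole:
    def __init__(self, value: float) -> None:
        self.value = value

    def __trunc__(self) -> "Whole":
        return Whole(float(int(self.value)))

w = math.trunc(Whole(2.9))
assert isinstance(w, Whole) and w.value == 2.0
```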
diff --git a/mypy/typeshed/stdlib/mmap.pyi b/mypy/typeshed/stdlib/mmap.pyi index 8dbec2388838..c74ad3cda6db 100644 --- a/mypy/typeshed/stdlib/mmap.pyi +++ b/mypy/typeshed/stdlib/mmap.pyi @@ -1,7 +1,8 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer, Unused from collections.abc import Iterable, Iterator, Sized from typing import NoReturn, overload +from typing_extensions import Self ACCESS_DEFAULT: int ACCESS_READ: int @@ -67,11 +68,14 @@ class mmap(Iterable[int], Sized): def __setitem__(self, __index: int, __object: int) -> None: ... @overload def __setitem__(self, __index: slice, __object: ReadableBuffer) -> None: ... - # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and - # __len__, so we claim that there is also an __iter__ to help type checkers. + # Doesn't actually exist, but the object actually supports "in" because it has __getitem__, + # so we claim that there is also a __contains__ to help type checkers. + def __contains__(self, __o: object) -> bool: ... + # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and __len__, + # so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... if sys.version_info >= (3, 8) and sys.platform != "win32": MADV_NORMAL: int diff --git a/mypy/typeshed/stdlib/modulefinder.pyi b/mypy/typeshed/stdlib/modulefinder.pyi index caed7efadccc..6f1917644b06 100644 --- a/mypy/typeshed/stdlib/modulefinder.pyi +++ b/mypy/typeshed/stdlib/modulefinder.pyi @@ -20,10 +20,9 @@ replacePackageMap: dict[str, str] # undocumented def ReplacePackage(oldname: str, newname: str) -> None: ... class Module: # undocumented - def __init__(self, name: str, file: str | None = ..., path: str | None = ...) -> None: ... + def __init__(self, name: str, file: str | None = None, path: str | None = None) -> None: ... class ModuleFinder: - modules: dict[str, Module] path: list[str] # undocumented badmodules: dict[str, dict[str, int]] # undocumented @@ -35,16 +34,16 @@ class ModuleFinder: if sys.version_info >= (3, 8): def __init__( self, - path: list[str] | None = ..., - debug: int = ..., - excludes: Container[str] | None = ..., - replace_paths: Sequence[tuple[str, str]] | None = ..., + path: list[str] | None = None, + debug: int = 0, + excludes: Container[str] | None = None, + replace_paths: Sequence[tuple[str, str]] | None = None, ) -> None: ... else: def __init__( self, - path: list[str] | None = ..., - debug: int = ..., + path: list[str] | None = None, + debug: int = 0, excludes: Container[str] = ..., replace_paths: Sequence[tuple[str, str]] = ..., ) -> None: ... @@ -55,12 +54,12 @@ class ModuleFinder: def run_script(self, pathname: str) -> None: ... def load_file(self, pathname: str) -> None: ... # undocumented def import_hook( - self, name: str, caller: Module | None = ..., fromlist: list[str] | None = ..., level: int = ... + self, name: str, caller: Module | None = None, fromlist: list[str] | None = None, level: int = -1 ) -> Module | None: ... # undocumented - def determine_parent(self, caller: Module | None, level: int = ...) -> Module | None: ... # undocumented + def determine_parent(self, caller: Module | None, level: int = -1) -> Module | None: ... 
# undocumented def find_head_package(self, parent: Module, name: str) -> tuple[Module, str]: ... # undocumented def load_tail(self, q: Module, tail: str) -> Module: ... # undocumented - def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = ...) -> None: ... # undocumented + def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = 0) -> None: ... # undocumented def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented def import_module(self, partname: str, fqname: str, parent: Module) -> Module | None: ... # undocumented def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: tuple[str, str, str]) -> Module: ... # undocumented @@ -69,7 +68,7 @@ class ModuleFinder: def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented def add_module(self, fqname: str) -> Module: ... # undocumented def find_module( - self, name: str, path: str | None, parent: Module | None = ... + self, name: str, path: str | None, parent: Module | None = None ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented def report(self) -> None: ... def any_missing(self) -> list[str]: ... # undocumented diff --git a/mypy/typeshed/stdlib/msilib/__init__.pyi b/mypy/typeshed/stdlib/msilib/__init__.pyi index 0e18350b226e..9f7367d152ba 100644 --- a/mypy/typeshed/stdlib/msilib/__init__.pyi +++ b/mypy/typeshed/stdlib/msilib/__init__.pyi @@ -24,7 +24,6 @@ if sys.platform == "win32": knownbits: Literal[0x3FFF] class Table: - name: str fields: list[tuple[int, str, int]] def __init__(self, name: str) -> None: ... @@ -50,7 +49,6 @@ if sys.platform == "win32": def gen_uuid() -> str: ... class CAB: - name: str files: list[tuple[str, str]] filenames: set[str] @@ -62,7 +60,6 @@ if sys.platform == "win32": _directories: set[str] class Directory: - db: _Database cab: CAB basedir: str @@ -82,28 +79,26 @@ if sys.platform == "win32": physical: str, _logical: str, default: str, - componentflags: int | None = ..., + componentflags: int | None = None, ) -> None: ... def start_component( self, - component: str | None = ..., - feature: Feature | None = ..., - flags: int | None = ..., - keyfile: str | None = ..., - uuid: str | None = ..., + component: str | None = None, + feature: Feature | None = None, + flags: int | None = None, + keyfile: str | None = None, + uuid: str | None = None, ) -> None: ... def make_short(self, file: str) -> str: ... - def add_file(self, file: str, src: str | None = ..., version: str | None = ..., language: str | None = ...) -> str: ... - def glob(self, pattern: str, exclude: Container[str] | None = ...) -> list[str]: ... + def add_file(self, file: str, src: str | None = None, version: str | None = None, language: str | None = None) -> str: ... + def glob(self, pattern: str, exclude: Container[str] | None = None) -> list[str]: ... def remove_pyc(self) -> None: ... class Binary: - name: str def __init__(self, fname: str) -> None: ... class Feature: - id: str def __init__( self, @@ -112,31 +107,28 @@ if sys.platform == "win32": title: str, desc: str, display: int, - level: int = ..., - parent: Feature | None = ..., - directory: str | None = ..., - attributes: int = ..., + level: int = 1, + parent: Feature | None = None, + directory: str | None = None, + attributes: int = 0, ) -> None: ... def set_current(self) -> None: ... class Control: - dlg: Dialog name: str def __init__(self, dlg: Dialog, name: str) -> None: ... 
- def event(self, event: str, argument: str, condition: str = ..., ordering: int | None = ...) -> None: ... + def event(self, event: str, argument: str, condition: str = "1", ordering: int | None = None) -> None: ... def mapping(self, event: str, attribute: str) -> None: ... def condition(self, action: str, condition: str) -> None: ... class RadioButtonGroup(Control): - property: str index: int def __init__(self, dlg: Dialog, name: str, property: str) -> None: ... - def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = ...) -> None: ... + def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = None) -> None: ... class Dialog: - db: _Database name: str x: int diff --git a/mypy/typeshed/stdlib/msilib/sequence.pyi b/mypy/typeshed/stdlib/msilib/sequence.pyi index 9cc1e0eaec01..b8af09f46e65 100644 --- a/mypy/typeshed/stdlib/msilib/sequence.pyi +++ b/mypy/typeshed/stdlib/msilib/sequence.pyi @@ -2,7 +2,6 @@ import sys from typing_extensions import TypeAlias if sys.platform == "win32": - _SequenceType: TypeAlias = list[tuple[str, str | None, int]] AdminExecuteSequence: _SequenceType diff --git a/mypy/typeshed/stdlib/msilib/text.pyi b/mypy/typeshed/stdlib/msilib/text.pyi index 879429ecea85..1353cf8a2392 100644 --- a/mypy/typeshed/stdlib/msilib/text.pyi +++ b/mypy/typeshed/stdlib/msilib/text.pyi @@ -1,7 +1,6 @@ import sys if sys.platform == "win32": - ActionText: list[tuple[str, str, str | None]] UIText: list[tuple[str, str | None]] diff --git a/mypy/typeshed/stdlib/msvcrt.pyi b/mypy/typeshed/stdlib/msvcrt.pyi index 0bea8ce22b06..5849b9b00ca0 100644 --- a/mypy/typeshed/stdlib/msvcrt.pyi +++ b/mypy/typeshed/stdlib/msvcrt.pyi @@ -21,8 +21,8 @@ if sys.platform == "win32": def getwch() -> str: ... def getche() -> bytes: ... def getwche() -> str: ... - def putch(__char: bytes) -> None: ... + def putch(__char: bytes | bytearray) -> None: ... def putwch(__unicode_char: str) -> None: ... - def ungetch(__char: bytes) -> None: ... + def ungetch(__char: bytes | bytearray) -> None: ... def ungetwch(__unicode_char: str) -> None: ... def heapmin() -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/connection.pyi index 489e8bd9a9f1..d034373712e0 100644 --- a/mypy/typeshed/stdlib/multiprocessing/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/connection.pyi @@ -1,18 +1,18 @@ import socket import sys import types -from _typeshed import Self +from _typeshed import ReadableBuffer from collections.abc import Iterable -from typing import Any, Union -from typing_extensions import SupportsIndex, TypeAlias +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias __all__ = ["Client", "Listener", "Pipe", "wait"] # https://docs.python.org/3/library/multiprocessing.html#address-formats -_Address: TypeAlias = Union[str, tuple[str, int]] +_Address: TypeAlias = str | tuple[str, int] class _ConnectionBase: - def __init__(self, handle: SupportsIndex, readable: bool = ..., writable: bool = ...) -> None: ... + def __init__(self, handle: SupportsIndex, readable: bool = True, writable: bool = True) -> None: ... @property def closed(self) -> bool: ... # undocumented @property @@ -21,13 +21,13 @@ class _ConnectionBase: def writable(self) -> bool: ... # undocumented def fileno(self) -> int: ... def close(self) -> None: ... - def send_bytes(self, buf: bytes, offset: int = ..., size: int | None = ...) -> None: ... 
+ def send_bytes(self, buf: ReadableBuffer, offset: int = 0, size: int | None = None) -> None: ... def send(self, obj: Any) -> None: ... - def recv_bytes(self, maxlength: int | None = ...) -> bytes: ... - def recv_bytes_into(self, buf: Any, offset: int = ...) -> int: ... + def recv_bytes(self, maxlength: int | None = None) -> bytes: ... + def recv_bytes_into(self, buf: Any, offset: int = 0) -> int: ... def recv(self) -> Any: ... - def poll(self, timeout: float | None = ...) -> bool: ... - def __enter__(self: Self) -> Self: ... + def poll(self, timeout: float | None = 0.0) -> bool: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -39,7 +39,7 @@ if sys.platform == "win32": class Listener: def __init__( - self, address: _Address | None = ..., family: str | None = ..., backlog: int = ..., authkey: bytes | None = ... + self, address: _Address | None = None, family: str | None = None, backlog: int = 1, authkey: bytes | None = None ) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... @@ -47,7 +47,7 @@ class Listener: def address(self) -> _Address: ... @property def last_accepted(self) -> _Address | None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -55,7 +55,15 @@ class Listener: def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... def answer_challenge(connection: Connection, authkey: bytes) -> None: ... def wait( - object_list: Iterable[Connection | socket.socket | int], timeout: float | None = ... + object_list: Iterable[Connection | socket.socket | int], timeout: float | None = None ) -> list[Connection | socket.socket | int]: ... -def Client(address: _Address, family: str | None = ..., authkey: bytes | None = ...) -> Connection: ... -def Pipe(duplex: bool = ...) -> tuple[_ConnectionBase, _ConnectionBase]: ... +def Client(address: _Address, family: str | None = None, authkey: bytes | None = None) -> Connection: ... + +# N.B. Keep this in sync with multiprocessing.context.BaseContext.Pipe. +# _ConnectionBase is the common base class of Connection and PipeConnection +# and can be used in cross-platform code. +if sys.platform != "win32": + def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... + +else: + def Pipe(duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... 
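The multiprocessing.connection changes above make `Pipe()` return the concrete per-platform connection classes, accept any `ReadableBuffer` in `send_bytes()`, and document `poll()`'s zero-second default timeout. A small sketch, not part of the diff, of that behaviour (no child process is needed):

```python
import multiprocessing as mp

# Pipe() returns two connection objects: Connection on POSIX,
# PipeConnection on Windows; both ends are usable in the same process.
parent, child = mp.Pipe(duplex=True)

child.send_bytes(bytearray(b"ping"))   # any readable buffer is accepted
assert parent.recv_bytes() == b"ping"

assert not parent.poll()               # poll() defaults to a 0.0s timeout
parent.close()
child.close()
```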
diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index 16b7cfe9e890..c498649a7b61 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -2,9 +2,8 @@ import ctypes import sys from collections.abc import Callable, Iterable, Sequence from ctypes import _CData -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from multiprocessing import popen_fork, popen_forkserver, popen_spawn_posix, popen_spawn_win32, queues, synchronize -from multiprocessing.connection import _ConnectionBase from multiprocessing.managers import SyncManager from multiprocessing.pool import Pool as _Pool from multiprocessing.process import BaseProcess @@ -12,6 +11,11 @@ from multiprocessing.sharedctypes import SynchronizedArray, SynchronizedBase from typing import Any, ClassVar, TypeVar, overload from typing_extensions import Literal, TypeAlias +if sys.platform != "win32": + from multiprocessing.connection import Connection +else: + from multiprocessing.connection import PipeConnection + if sys.version_info >= (3, 8): __all__ = () else: @@ -43,25 +47,33 @@ class BaseContext: def active_children() -> list[BaseProcess]: ... def cpu_count(self) -> int: ... def Manager(self) -> SyncManager: ... - def Pipe(self, duplex: bool = ...) -> tuple[_ConnectionBase, _ConnectionBase]: ... + + # N.B. Keep this in sync with multiprocessing.connection.Pipe. + # _ConnectionBase is the common base class of Connection and PipeConnection + # and can be used in cross-platform code. + if sys.platform != "win32": + def Pipe(self, duplex: bool = True) -> tuple[Connection, Connection]: ... + else: + def Pipe(self, duplex: bool = True) -> tuple[PipeConnection, PipeConnection]: ... + def Barrier( - self, parties: int, action: Callable[..., object] | None = ..., timeout: float | None = ... + self, parties: int, action: Callable[..., object] | None = None, timeout: float | None = None ) -> synchronize.Barrier: ... - def BoundedSemaphore(self, value: int = ...) -> synchronize.BoundedSemaphore: ... - def Condition(self, lock: _LockLike | None = ...) -> synchronize.Condition: ... + def BoundedSemaphore(self, value: int = 1) -> synchronize.BoundedSemaphore: ... + def Condition(self, lock: _LockLike | None = None) -> synchronize.Condition: ... def Event(self) -> synchronize.Event: ... def Lock(self) -> synchronize.Lock: ... def RLock(self) -> synchronize.RLock: ... - def Semaphore(self, value: int = ...) -> synchronize.Semaphore: ... - def Queue(self, maxsize: int = ...) -> queues.Queue[Any]: ... - def JoinableQueue(self, maxsize: int = ...) -> queues.JoinableQueue[Any]: ... + def Semaphore(self, value: int = 1) -> synchronize.Semaphore: ... + def Queue(self, maxsize: int = 0) -> queues.Queue[Any]: ... + def JoinableQueue(self, maxsize: int = 0) -> queues.JoinableQueue[Any]: ... def SimpleQueue(self) -> queues.SimpleQueue[Any]: ... def Pool( self, - processes: int | None = ..., - initializer: Callable[..., object] | None = ..., + processes: int | None = None, + initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ..., - maxtasksperchild: int | None = ..., + maxtasksperchild: int | None = None, ) -> _Pool: ... @overload def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... @@ -74,34 +86,34 @@ class BaseContext: @overload def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> _CT: ... 
@overload - def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = ...) -> SynchronizedBase[_CT]: ... + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[_CT]: ... @overload - def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = ...) -> SynchronizedBase[Any]: ... + def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True) -> SynchronizedBase[Any]: ... @overload - def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ...) -> Any: ... + def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True) -> Any: ... @overload def Array(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]) -> _CT: ... @overload def Array( - self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = ... + self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True ) -> SynchronizedArray[_CT]: ... @overload def Array( - self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = ... + self, typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike = True ) -> SynchronizedArray[Any]: ... @overload def Array( - self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = ... + self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = True ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... - def log_to_stderr(self, level: str | None = ...) -> Logger: ... + def log_to_stderr(self, level: _LoggingLevel | None = None) -> Logger: ... def allow_connection_pickling(self) -> None: ... def set_executable(self, executable: str) -> None: ... def set_forkserver_preload(self, module_names: list[str]) -> None: ... if sys.platform != "win32": @overload - def get_context(self, method: None = ...) -> DefaultContext: ... + def get_context(self, method: None = None) -> DefaultContext: ... @overload def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... @overload @@ -112,17 +124,17 @@ class BaseContext: def get_context(self, method: str) -> BaseContext: ... else: @overload - def get_context(self, method: None = ...) -> DefaultContext: ... + def get_context(self, method: None = None) -> DefaultContext: ... @overload def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... @overload def get_context(self, method: str) -> BaseContext: ... @overload - def get_start_method(self, allow_none: Literal[False] = ...) -> str: ... + def get_start_method(self, allow_none: Literal[False] = False) -> str: ... @overload def get_start_method(self, allow_none: bool) -> str | None: ... - def set_start_method(self, method: str | None, force: bool = ...) -> None: ... + def set_start_method(self, method: str | None, force: bool = False) -> None: ... @property def reducer(self) -> str: ... @reducer.setter @@ -137,8 +149,7 @@ class Process(BaseProcess): class DefaultContext(BaseContext): Process: ClassVar[type[Process]] def __init__(self, context: BaseContext) -> None: ... - def set_start_method(self, method: str | None, force: bool = ...) -> None: ... - def get_start_method(self, allow_none: bool = ...) 
-> str: ... + def get_start_method(self, allow_none: bool = False) -> str: ... def get_all_start_methods(self) -> list[str]: ... if sys.version_info < (3, 8): __all__: ClassVar[list[str]] diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi index 5d289c058e03..5b2a33772de6 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi @@ -47,9 +47,9 @@ class DummyProcess(threading.Thread): def exitcode(self) -> Literal[0] | None: ... def __init__( self, - group: Any = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: Any = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ..., ) -> None: ... @@ -65,11 +65,13 @@ class Value: _typecode: Any _value: Any value: Any - def __init__(self, typecode: Any, value: Any, lock: Any = ...) -> None: ... + def __init__(self, typecode: Any, value: Any, lock: Any = True) -> None: ... -def Array(typecode: Any, sequence: Sequence[Any], lock: Any = ...) -> array.array[Any]: ... +def Array(typecode: Any, sequence: Sequence[Any], lock: Any = True) -> array.array[Any]: ... def Manager() -> Any: ... -def Pool(processes: int | None = ..., initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ...) -> Any: ... +def Pool( + processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... +) -> Any: ... def active_children() -> list[Any]: ... current_process = threading.current_thread diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi index fd909d0d32e1..fcd03a657319 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/connection.pyi @@ -1,14 +1,13 @@ -from _typeshed import Self from queue import Queue from types import TracebackType -from typing import Any, Union -from typing_extensions import TypeAlias +from typing import Any +from typing_extensions import Self, TypeAlias __all__ = ["Client", "Listener", "Pipe"] families: list[None] -_Address: TypeAlias = Union[str, tuple[str, int]] +_Address: TypeAlias = str | tuple[str, int] class Connection: _in: Any @@ -17,25 +16,25 @@ class Connection: recv_bytes: Any send: Any send_bytes: Any - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __init__(self, _in: Any, _out: Any) -> None: ... def close(self) -> None: ... - def poll(self, timeout: float = ...) -> bool: ... + def poll(self, timeout: float = 0.0) -> bool: ... class Listener: _backlog_queue: Queue[Any] | None @property def address(self) -> Queue[Any] | None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - def __init__(self, address: _Address | None = ..., family: int | None = ..., backlog: int = ...) -> None: ... + def __init__(self, address: _Address | None = None, family: int | None = None, backlog: int = 1) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... def Client(address: _Address) -> Connection: ... -def Pipe(duplex: bool = ...) 
-> tuple[Connection, Connection]: ... +def Pipe(duplex: bool = True) -> tuple[Connection, Connection]: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi index 93777d926ca2..df435f00ebe7 100644 --- a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -1,4 +1,4 @@ -from _typeshed import FileDescriptorLike +from _typeshed import FileDescriptorLike, Unused from collections.abc import Sequence from struct import Struct from typing import Any @@ -9,7 +9,6 @@ MAXFDS_TO_SEND: int SIGNED_STRUCT: Struct class ForkServer: - def __init__(self) -> None: ... def set_forkserver_preload(self, modules_names: list[str]) -> None: ... def get_inherited_fds(self) -> list[int] | None: ... def connect_to_new_process(self, fds: Sequence[int]) -> tuple[int, int]: ... @@ -19,8 +18,8 @@ def main( listener_fd: int | None, alive_r: FileDescriptorLike, preload: Sequence[str], - main_path: str | None = ..., - sys_path: object | None = ..., + main_path: str | None = None, + sys_path: Unused = None, ) -> None: ... def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/heap.pyi b/mypy/typeshed/stdlib/multiprocessing/heap.pyi index 9c8f55604a64..b5e2ced5e8ee 100644 --- a/mypy/typeshed/stdlib/multiprocessing/heap.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/heap.pyi @@ -15,7 +15,7 @@ class Arena: def __init__(self, size: int) -> None: ... else: fd: int - def __init__(self, size: int, fd: int = ...) -> None: ... + def __init__(self, size: int, fd: int = -1) -> None: ... _Block: TypeAlias = tuple[Arena, int, int] diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index d953785d81cb..e035a1875650 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -1,11 +1,11 @@ import queue import sys import threading -from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from _typeshed import SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Sequence from types import TracebackType from typing import Any, AnyStr, ClassVar, Generic, TypeVar, overload -from typing_extensions import SupportsIndex, TypeAlias +from typing_extensions import Self, SupportsIndex, TypeAlias from .connection import Connection from .context import BaseContext @@ -47,11 +47,11 @@ class BaseProxy: self, token: Any, serializer: str, - manager: Any = ..., - authkey: AnyStr | None = ..., - exposed: Any = ..., - incref: bool = ..., - manager_owned: bool = ..., + manager: Any = None, + authkey: AnyStr | None = None, + exposed: Any = None, + incref: bool = True, + manager_owned: bool = False, ) -> None: ... def __deepcopy__(self, memo: Any | None) -> Any: ... def _callmethod(self, methodname: str, args: tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... @@ -68,9 +68,9 @@ class ValueProxy(BaseProxy, Generic[_T]): class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): __builtins__: ClassVar[dict[str, Any]] def __len__(self) -> int: ... - def __getitem__(self, __k: _KT) -> _VT: ... - def __setitem__(self, __k: _KT, __v: _VT) -> None: ... - def __delitem__(self, __v: _KT) -> None: ... + def __getitem__(self, __key: _KT) -> _VT: ... 
+ def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> dict[_KT, _VT]: ... @overload @@ -82,8 +82,8 @@ class DictProxy(BaseProxy, MutableMapping[_KT, _VT]): @overload def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... def keys(self) -> list[_KT]: ... # type: ignore[override] - def values(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] - def items(self) -> list[_VT]: ... # type: ignore[override] + def items(self) -> list[tuple[_KT, _VT]]: ... # type: ignore[override] + def values(self) -> list[_VT]: ... # type: ignore[override] class BaseListProxy(BaseProxy, MutableSequence[_T]): __builtins__: ClassVar[dict[str, Any]] @@ -111,13 +111,13 @@ class BaseListProxy(BaseProxy, MutableSequence[_T]): # Use BaseListProxy[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] # to work around invariance @overload - def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: BaseListProxy[SupportsRichComparisonT], *, key: None = None, reverse: bool = ...) -> None: ... @overload def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... class ListProxy(BaseListProxy[_T]): - def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] - def __imul__(self: Self, __n: SupportsIndex) -> Self: ... # type: ignore[override] + def __iadd__(self, __x: Iterable[_T]) -> Self: ... # type: ignore[override] + def __imul__(self, __n: SupportsIndex) -> Self: ... # type: ignore[override] # Returned by BaseManager.get_server() class Server: @@ -132,36 +132,40 @@ class BaseManager: if sys.version_info >= (3, 11): def __init__( self, - address: Any | None = ..., - authkey: bytes | None = ..., - serializer: str = ..., - ctx: BaseContext | None = ..., + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, *, - shutdown_timeout: float = ..., + shutdown_timeout: float = 1.0, ) -> None: ... else: def __init__( - self, address: Any | None = ..., authkey: bytes | None = ..., serializer: str = ..., ctx: BaseContext | None = ... + self, + address: Any | None = None, + authkey: bytes | None = None, + serializer: str = "pickle", + ctx: BaseContext | None = None, ) -> None: ... def get_server(self) -> Server: ... def connect(self) -> None: ... - def start(self, initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ...) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ...) -> None: ... def shutdown(self) -> None: ... # only available after start() was called - def join(self, timeout: float | None = ...) -> None: ... # undocumented + def join(self, timeout: float | None = None) -> None: ... # undocumented @property def address(self) -> Any: ... @classmethod def register( cls, typeid: str, - callable: Callable[..., object] | None = ..., - proxytype: Any = ..., - exposed: Sequence[str] | None = ..., - method_to_typeid: Mapping[str, str] | None = ..., - create_method: bool = ..., + callable: Callable[..., object] | None = None, + proxytype: Any = None, + exposed: Sequence[str] | None = None, + method_to_typeid: Mapping[str, str] | None = None, + create_method: bool = True, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... 
def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi index 2b97e16f0525..a19dd555e254 100644 --- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi @@ -1,9 +1,8 @@ import sys -from _typeshed import Self from collections.abc import Callable, Iterable, Iterator, Mapping from types import TracebackType from typing import Any, Generic, TypeVar -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -26,8 +25,8 @@ class ApplyResult(Generic[_T]): error_callback: Callable[[BaseException], object] | None, ) -> None: ... - def get(self, timeout: float | None = ...) -> _T: ... - def wait(self, timeout: float | None = ...) -> None: ... + def get(self, timeout: float | None = None) -> _T: ... + def wait(self, timeout: float | None = None) -> None: ... def ready(self) -> bool: ... def successful(self) -> bool: ... if sys.version_info >= (3, 9): @@ -62,20 +61,20 @@ class IMapIterator(Iterator[_T]): else: def __init__(self, cache: dict[int, IMapIterator[Any]]) -> None: ... - def __iter__(self: Self) -> Self: ... - def next(self, timeout: float | None = ...) -> _T: ... - def __next__(self, timeout: float | None = ...) -> _T: ... + def __iter__(self) -> Self: ... + def next(self, timeout: float | None = None) -> _T: ... + def __next__(self, timeout: float | None = None) -> _T: ... class IMapUnorderedIterator(IMapIterator[_T]): ... class Pool: def __init__( self, - processes: int | None = ..., - initializer: Callable[..., object] | None = ..., + processes: int | None = None, + initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ..., - maxtasksperchild: int | None = ..., - context: Any | None = ..., + maxtasksperchild: int | None = None, + context: Any | None = None, ) -> None: ... def apply(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ...) -> _T: ... def apply_async( @@ -83,42 +82,40 @@ class Pool: func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[_T]: ... - def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ...) -> list[_T]: ... + def map(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = None) -> list[_T]: ... def map_async( self, func: Callable[[_S], _T], iterable: Iterable[_S], - chunksize: int | None = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> MapResult[_T]: ... - def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ...) -> IMapIterator[_T]: ... - def imap_unordered( - self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = ... - ) -> IMapIterator[_T]: ... - def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = ...) -> list[_T]: ... 
+ def imap(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def imap_unordered(self, func: Callable[[_S], _T], iterable: Iterable[_S], chunksize: int | None = 1) -> IMapIterator[_T]: ... + def starmap(self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], chunksize: int | None = None) -> list[_T]: ... def starmap_async( self, func: Callable[..., _T], iterable: Iterable[Iterable[Any]], - chunksize: int | None = ..., - callback: Callable[[_T], object] | None = ..., - error_callback: Callable[[BaseException], object] | None = ..., + chunksize: int | None = None, + callback: Callable[[_T], object] | None = None, + error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[list[_T]]: ... def close(self) -> None: ... def terminate(self) -> None: ... def join(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class ThreadPool(Pool): def __init__( - self, processes: int | None = ..., initializer: Callable[..., object] | None = ..., initargs: Iterable[Any] = ... + self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... ) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi index 3db6a84394b9..4fcbfd99a8d0 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_fork.pyi @@ -16,8 +16,8 @@ if sys.platform != "win32": def __init__(self, process_obj: BaseProcess) -> None: ... def duplicate_for_child(self, fd: int) -> int: ... - def poll(self, flag: int = ...) -> int | None: ... - def wait(self, timeout: float | None = ...) -> int | None: ... + def poll(self, flag: int = 1) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def close(self) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi index d28c7245fd54..f7d53bbb3e41 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_forkserver.pyi @@ -1,5 +1,4 @@ import sys -from multiprocessing.process import BaseProcess from typing import ClassVar from . import popen_fork @@ -15,8 +14,3 @@ if sys.platform != "win32": class Popen(popen_fork.Popen): DupFd: ClassVar[type[_DupFd]] finalizer: Finalize - sentinel: int - - def __init__(self, process_obj: BaseProcess) -> None: ... - def duplicate_for_child(self, fd: int) -> int: ... - def poll(self, flag: int = ...) -> int | None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi index 81aaac7ca459..7e81d39600ad 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_posix.pyi @@ -1,5 +1,4 @@ import sys -from multiprocessing.process import BaseProcess from typing import ClassVar from . 
import popen_fork @@ -19,6 +18,3 @@ if sys.platform != "win32": finalizer: Finalize pid: int # may not exist if _launch raises in second try / except sentinel: int # may not exist if _launch raises in second try / except - - def __init__(self, process_obj: BaseProcess) -> None: ... - def duplicate_for_child(self, fd: int) -> int: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi index f5cb0a6c4844..3dc9d5bd7332 100644 --- a/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/popen_spawn_win32.pyi @@ -21,7 +21,7 @@ if sys.platform == "win32": def __init__(self, process_obj: BaseProcess) -> None: ... def duplicate_for_child(self, handle: int) -> int: ... - def wait(self, timeout: float | None = ...) -> int | None: ... + def wait(self, timeout: float | None = None) -> int | None: ... def poll(self) -> int | None: ... def terminate(self) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/process.pyi b/mypy/typeshed/stdlib/multiprocessing/process.pyi index 7c8422e391c2..ef1b4b596d33 100644 --- a/mypy/typeshed/stdlib/multiprocessing/process.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/process.pyi @@ -14,20 +14,20 @@ class BaseProcess: _identity: tuple[int, ...] # undocumented def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., kwargs: Mapping[str, Any] = ..., *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def run(self) -> None: ... def start(self) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def close(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... + def join(self, timeout: float | None = None) -> None: ... def is_alive(self) -> bool: ... @property def exitcode(self) -> int | None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/queues.pyi b/mypy/typeshed/stdlib/multiprocessing/queues.pyi index 1d31fa694c45..7ba17dcfbe05 100644 --- a/mypy/typeshed/stdlib/multiprocessing/queues.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/queues.pyi @@ -12,21 +12,16 @@ _T = TypeVar("_T") class Queue(queue.Queue[_T]): # FIXME: `ctx` is a circular dependency and it's not actually optional. # It's marked as such to be able to use the generic Queue in __init__.pyi. - def __init__(self, maxsize: int = ..., *, ctx: Any = ...) -> None: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... - def put(self, obj: _T, block: bool = ..., timeout: float | None = ...) -> None: ... - def qsize(self) -> int: ... - def empty(self) -> bool: ... - def full(self) -> bool: ... + def __init__(self, maxsize: int = 0, *, ctx: Any = ...) -> None: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def put(self, obj: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def get_nowait(self) -> _T: ... def close(self) -> None: ... def join_thread(self) -> None: ... def cancel_join_thread(self) -> None: ... -class JoinableQueue(Queue[_T]): - def task_done(self) -> None: ... - def join(self) -> None: ... +class JoinableQueue(Queue[_T]): ... class SimpleQueue(Generic[_T]): def __init__(self, *, ctx: Any = ...) -> None: ... 
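The queues.pyi hunk above spells out the runtime defaults that were previously hidden behind `...` (maxsize=0, block=True, timeout=None) and drops method re-declarations that Queue already inherits from queue.Queue. A minimal sketch of code relying on those defaults (illustrative only, not part of the patch):

```python
import multiprocessing as mp

if __name__ == "__main__":
    q = mp.Queue()   # maxsize defaults to 0, i.e. effectively unbounded
    q.put(42)        # block=True, timeout=None by default
    print(q.get())   # likewise blocks until an item is available
```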
diff --git a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi index a22c16828780..e5a8cde8f849 100644 --- a/mypy/typeshed/stdlib/multiprocessing/reduction.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/reduction.pyi @@ -1,8 +1,12 @@ import pickle import sys -from _typeshed import HasFileno +from _typeshed import HasFileno, SupportsWrite, Unused from abc import ABCMeta +from builtins import type as Type # alias to avoid name clash +from collections.abc import Callable from copyreg import _DispatchTableType +from multiprocessing import connection +from pickle import _ReducedType from socket import socket from typing import Any from typing_extensions import Literal @@ -12,34 +16,36 @@ if sys.platform == "win32": else: __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] +HAVE_SEND_HANDLE: bool + class ForkingPickler(pickle.Pickler): dispatch_table: _DispatchTableType - def __init__(self, *args) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ...) -> None: ... @classmethod - def register(cls, type, reduce) -> None: ... + def register(cls, type: Type, reduce: Callable[[Any], _ReducedType]) -> None: ... @classmethod - def dumps(cls, obj, protocol: Any | None = ...): ... + def dumps(cls, obj: Any, protocol: int | None = None) -> memoryview: ... loads = pickle.loads register = ForkingPickler.register -def dump(obj, file, protocol: Any | None = ...) -> None: ... +def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None) -> None: ... if sys.platform == "win32": if sys.version_info >= (3, 8): def duplicate( - handle: int, target_process: int | None = ..., inheritable: bool = ..., *, source_process: int | None = ... + handle: int, target_process: int | None = None, inheritable: bool = False, *, source_process: int | None = None ) -> int: ... else: - def duplicate(handle: int, target_process: int | None = ..., inheritable: bool = ...) -> int: ... + def duplicate(handle: int, target_process: int | None = None, inheritable: bool = False) -> int: ... - def steal_handle(source_pid, handle): ... - def send_handle(conn, handle, destination_pid) -> None: ... - def recv_handle(conn): ... + def steal_handle(source_pid: int, handle: int) -> int: ... + def send_handle(conn: connection.PipeConnection, handle: int, destination_pid: int) -> None: ... + def recv_handle(conn: connection.PipeConnection) -> int: ... class DupHandle: - def __init__(self, handle, access, pid: Any | None = ...) -> None: ... - def detach(self): ... + def __init__(self, handle: int, access: int, pid: int | None = None) -> None: ... + def detach(self) -> int: ... else: if sys.platform == "darwin": @@ -48,10 +54,10 @@ else: ACKNOWLEDGE: Literal[False] def recvfds(sock: socket, size: int) -> list[int]: ... - def send_handle(conn, handle, destination_pid) -> None: ... + def send_handle(conn: HasFileno, handle: int, destination_pid: Unused) -> None: ... def recv_handle(conn: HasFileno) -> int: ... - def sendfds(sock, fds) -> None: ... - def DupFd(fd): ... + def sendfds(sock: socket, fds: list[int]) -> None: ... + def DupFd(fd: int) -> Any: ... # Return type is really hard to get right # These aliases are to work around pyright complaints. # Pyright doesn't like it when a class object is defined as an alias @@ -85,4 +91,4 @@ class AbstractReducer(metaclass=ABCMeta): sendfds = _sendfds recvfds = _recvfds DupFd = _DupFd - def __init__(self, *args) -> None: ... 
+ def __init__(self, *args: Unused) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi index 7708df9b6f3c..5fee7cf31e17 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_sharer.pyi @@ -17,4 +17,4 @@ else: def __init__(self, fd: int) -> None: ... def detach(self) -> int: ... -def stop(timeout: float | None = ...) -> None: ... +def stop(timeout: float | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi index 98abb075fb3d..e2b940796126 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -1,10 +1,9 @@ -from _typeshed import Incomplete, StrOrBytesPath +from _typeshed import FileDescriptorOrPath, Incomplete from collections.abc import Sized __all__ = ["ensure_running", "register", "unregister"] class ResourceTracker: - def __init__(self) -> None: ... def getfd(self) -> int | None: ... def ensure_running(self) -> None: ... def register(self, name: Sized, rtype: Incomplete) -> None: ... @@ -16,4 +15,4 @@ register = _resource_tracker.register unregister = _resource_tracker.unregister getfd = _resource_tracker.getfd -def main(fd: StrOrBytesPath | int) -> None: ... +def main(fd: FileDescriptorOrPath) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi index 3ce0ca3863cc..ae6e2a0ed19f 100644 --- a/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/shared_memory.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self from collections.abc import Iterable from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -11,7 +11,7 @@ __all__ = ["SharedMemory", "ShareableList"] _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) class SharedMemory: - def __init__(self, name: str | None = ..., create: bool = ..., size: int = ...) -> None: ... + def __init__(self, name: str | None = None, create: bool = False, size: int = 0) -> None: ... @property def buf(self) -> memoryview: ... @property @@ -24,12 +24,12 @@ class SharedMemory: class ShareableList(Generic[_SLT]): shm: SharedMemory @overload - def __init__(self, sequence: None = ..., *, name: str | None = ...) -> None: ... + def __init__(self, sequence: None = None, *, name: str | None = None) -> None: ... @overload - def __init__(self, sequence: Iterable[_SLT], *, name: str | None = ...) -> None: ... + def __init__(self, sequence: Iterable[_SLT], *, name: str | None = None) -> None: ... def __getitem__(self, position: int) -> _SLT: ... def __setitem__(self, position: int, value: _SLT) -> None: ... - def __reduce__(self: Self) -> tuple[Self, tuple[_SLT, ...]]: ... + def __reduce__(self) -> tuple[Self, tuple[_SLT, ...]]: ... def __len__(self) -> int: ... @property def format(self) -> str: ... 
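In shared_memory.pyi the placeholder defaults become the real ones (name=None, create=False, size=0), so creating a new block versus attaching to an existing one is now visible in the signature. A short sketch of that distinction (illustrative only, not part of the patch; the 16-byte size and 5-byte payload are arbitrary example values, Python 3.8+):

```python
from multiprocessing.shared_memory import SharedMemory

shm = SharedMemory(create=True, size=16)   # create a new 16-byte block
try:
    shm.buf[:5] = b"hello"                 # buf is a writable memoryview
    peer = SharedMemory(name=shm.name)     # create=False, size=0: attach to the existing block
    print(bytes(peer.buf[:5]))
    peer.close()
finally:
    shm.close()
    shm.unlink()                           # the creating side releases the block
```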
diff --git a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi index e988cda322f4..686a45d9ae41 100644 --- a/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/sharedctypes.pyi @@ -21,56 +21,56 @@ def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[An @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload -def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = ...) -> _CT: ... +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = None) -> _CT: ... @overload def Value( - typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None ) -> SynchronizedBase[_CT]: ... @overload def Value( - typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike = True, ctx: BaseContext | None = None ) -> SynchronizedBase[Any]: ... @overload def Value( - typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = True, ctx: BaseContext | None = None ) -> Any: ... @overload def Array( - typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = ... + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = None ) -> _CT: ... @overload def Array( typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, - lock: Literal[True] | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, ) -> SynchronizedArray[_CT]: ... @overload def Array( typecode_or_type: str, size_or_initializer: int | Sequence[Any], *, - lock: Literal[True] | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: Literal[True] | _LockLike = True, + ctx: BaseContext | None = None, ) -> SynchronizedArray[Any]: ... @overload def Array( typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, - lock: bool | _LockLike = ..., - ctx: BaseContext | None = ..., + lock: bool | _LockLike = True, + ctx: BaseContext | None = None, ) -> Any: ... def copy(obj: _CT) -> _CT: ... @overload -def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = ..., ctx: Any | None = ...) -> Synchronized[_T]: ... +def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ... @overload -def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = ..., ctx: Any | None = ...) -> SynchronizedString: ... +def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... @overload -def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = ..., ctx: Any | None = ...) -> SynchronizedArray[_CT]: ... +def synchronized(obj: ctypes.Array[_CT], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedArray[_CT]: ... @overload -def synchronized(obj: _CT, lock: _LockLike | None = ..., ctx: Any | None = ...) 
-> SynchronizedBase[_CT]: ... +def synchronized(obj: _CT, lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedBase[_CT]: ... class _AcquireFunc(Protocol): def __call__(self, block: bool = ..., timeout: float | None = ...) -> bool: ... @@ -78,7 +78,7 @@ class _AcquireFunc(Protocol): class SynchronizedBase(Generic[_CT]): acquire: _AcquireFunc release: Callable[[], None] - def __init__(self, obj: Any, lock: _LockLike | None = ..., ctx: Any | None = ...) -> None: ... + def __init__(self, obj: Any, lock: _LockLike | None = None, ctx: Any | None = None) -> None: ... def __reduce__(self) -> tuple[Callable[[Any, _LockLike], SynchronizedBase[Any]], tuple[Any, _LockLike]]: ... def get_obj(self) -> _CT: ... def get_lock(self) -> _LockLike: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/spawn.pyi b/mypy/typeshed/stdlib/multiprocessing/spawn.pyi index 50570ff3717b..26ff165756bf 100644 --- a/mypy/typeshed/stdlib/multiprocessing/spawn.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/spawn.pyi @@ -20,7 +20,7 @@ def get_executable() -> str: ... def is_forking(argv: Sequence[str]) -> bool: ... def freeze_support() -> None: ... def get_command_line(**kwds: Any) -> list[str]: ... -def spawn_main(pipe_handle: int, parent_pid: int | None = ..., tracker_fd: int | None = ...) -> None: ... +def spawn_main(pipe_handle: int, parent_pid: int | None = None, tracker_fd: int | None = None) -> None: ... # undocumented def _main(fd: int) -> Any: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi index c89142f2cd3b..7043759078a2 100644 --- a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi @@ -11,18 +11,18 @@ _LockLike: TypeAlias = Lock | RLock class Barrier(threading.Barrier): def __init__( - self, parties: int, action: Callable[[], object] | None = ..., timeout: float | None = ..., *ctx: BaseContext + self, parties: int, action: Callable[[], object] | None = None, timeout: float | None = None, *ctx: BaseContext ) -> None: ... class BoundedSemaphore(Semaphore): - def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... class Condition(AbstractContextManager[bool]): - def __init__(self, lock: _LockLike | None = ..., *, ctx: BaseContext) -> None: ... - def notify(self, n: int = ...) -> None: ... + def __init__(self, lock: _LockLike | None = None, *, ctx: BaseContext) -> None: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], bool], timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], bool], timeout: float | None = None) -> bool: ... def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... def release(self) -> None: ... def __exit__( @@ -34,7 +34,7 @@ class Event: def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... @@ -43,7 +43,7 @@ class RLock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... class Semaphore(SemLock): - def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... 
+ def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... # Not part of public API class SemLock(AbstractContextManager[bool]): diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index e89b4a71cad4..006ec3a9f6ce 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -1,7 +1,7 @@ import threading -from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc +from _typeshed import Incomplete, ReadableBuffer, SupportsTrunc, Unused from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence -from logging import Logger +from logging import Logger, _Level as _LoggingLevel from typing import Any, SupportsInt from typing_extensions import SupportsIndex @@ -37,7 +37,7 @@ def debug(msg: object, *args: object) -> None: ... def info(msg: object, *args: object) -> None: ... def sub_warning(msg: object, *args: object) -> None: ... def get_logger() -> Logger: ... -def log_to_stderr(level: int | None = ...) -> Logger: ... +def log_to_stderr(level: _LoggingLevel | None = None) -> Logger: ... def is_abstract_socket_namespace(address: str | bytes | None) -> bool: ... abstract_sockets_supported: bool @@ -51,12 +51,12 @@ class Finalize: obj: Incomplete | None, callback: Callable[..., Incomplete], args: Sequence[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., - exitpriority: int | None = ..., + kwargs: Mapping[str, Any] | None = None, + exitpriority: int | None = None, ) -> None: ... def __call__( self, - wr: object = ..., + wr: Unused = None, _finalizer_registry: MutableMapping[Incomplete, Incomplete] = ..., sub_debug: Callable[..., object] = ..., getpid: Callable[[], int] = ..., @@ -69,12 +69,10 @@ def is_exiting() -> bool: ... class ForkAwareThreadLock: acquire: Callable[[bool, float], bool] release: Callable[[], None] - def __init__(self) -> None: ... def __enter__(self) -> bool: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... -class ForkAwareLocal(threading.local): - def __init__(self) -> None: ... +class ForkAwareLocal(threading.local): ... MAXFD: int diff --git a/mypy/typeshed/stdlib/netrc.pyi b/mypy/typeshed/stdlib/netrc.pyi index 803c78073348..480f55a46d64 100644 --- a/mypy/typeshed/stdlib/netrc.pyi +++ b/mypy/typeshed/stdlib/netrc.pyi @@ -1,3 +1,4 @@ +import sys from _typeshed import StrOrBytesPath from typing_extensions import TypeAlias @@ -7,13 +8,16 @@ class NetrcParseError(Exception): filename: str | None lineno: int | None msg: str - def __init__(self, msg: str, filename: StrOrBytesPath | None = ..., lineno: int | None = ...) -> None: ... + def __init__(self, msg: str, filename: StrOrBytesPath | None = None, lineno: int | None = None) -> None: ... # (login, account, password) tuple -_NetrcTuple: TypeAlias = tuple[str, str | None, str | None] +if sys.version_info >= (3, 11): + _NetrcTuple: TypeAlias = tuple[str, str, str] +else: + _NetrcTuple: TypeAlias = tuple[str, str | None, str | None] class netrc: hosts: dict[str, _NetrcTuple] macros: dict[str, list[str]] - def __init__(self, file: StrOrBytesPath | None = ...) -> None: ... + def __init__(self, file: StrOrBytesPath | None = None) -> None: ... def authenticators(self, host: str) -> _NetrcTuple | None: ... 
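For netrc.pyi, _NetrcTuple becomes version-dependent: on 3.11+ all three fields are plain str, while older versions may return None for account/password. A hedged sketch of the usual lookup pattern (illustrative only; "example.com" is a placeholder host and a readable ~/.netrc is assumed):

```python
import netrc

auth = netrc.netrc().authenticators("example.com")  # file=None falls back to ~/.netrc
if auth is not None:
    login, account, password = auth  # all str on 3.11+, str | None before that
    print(login, password)
```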
diff --git a/mypy/typeshed/stdlib/nntplib.pyi b/mypy/typeshed/stdlib/nntplib.pyi index aa5bcba5726c..f948c1430c90 100644 --- a/mypy/typeshed/stdlib/nntplib.pyi +++ b/mypy/typeshed/stdlib/nntplib.pyi @@ -2,11 +2,11 @@ import datetime import socket import ssl import sys -from _typeshed import Self +from _typeshed import Unused from builtins import list as _list # conflicts with a method named "list" from collections.abc import Iterable from typing import IO, Any, NamedTuple -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "NNTP", @@ -65,49 +65,49 @@ class NNTP: def __init__( self, host: str, - port: int = ..., - user: str | None = ..., - password: str | None = ..., - readermode: bool | None = ..., - usenetrc: bool = ..., + port: int = 119, + user: str | None = None, + password: str | None = None, + readermode: bool | None = None, + usenetrc: bool = False, timeout: float = ..., ) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> dict[str, _list[str]]: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def list(self, group_pattern: str | None = ..., *, file: _File = ...) -> tuple[str, _list[str]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = None) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = None, *, file: _File = None) -> tuple[str, _list[str]]: ... def description(self, group: str) -> str: ... def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... def group(self, name: str) -> tuple[str, int, int, int, str]: ... - def help(self, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def stat(self, message_spec: Any = ...) -> tuple[str, int, str]: ... + def help(self, *, file: _File = None) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = None) -> tuple[str, int, str]: ... def next(self) -> tuple[str, int, str]: ... def last(self) -> tuple[str, int, str]: ... - def head(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... - def body(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... - def article(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... + def head(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = None, *, file: _File = None) -> tuple[str, ArticleInfo]: ... def slave(self) -> str: ... - def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> tuple[str, _list[str]]: ... - def xover(self, start: int, end: int, *, file: _File = ...) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... 
+ def xhdr(self, hdr: str, str: Any, *, file: _File = None) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = None) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... def over( - self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = ... + self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = None ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... if sys.version_info < (3, 9): - def xgtitle(self, group: str, *, file: _File = ...) -> tuple[str, _list[tuple[str, str]]]: ... + def xgtitle(self, group: str, *, file: _File = None) -> tuple[str, _list[tuple[str, str]]]: ... def xpath(self, id: Any) -> tuple[str, str]: ... def date(self) -> tuple[str, datetime.datetime]: ... def post(self, data: bytes | Iterable[bytes]) -> str: ... def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... def quit(self) -> str: ... - def login(self, user: str | None = ..., password: str | None = ..., usenetrc: bool = ...) -> None: ... - def starttls(self, context: ssl.SSLContext | None = ...) -> None: ... + def login(self, user: str | None = None, password: str | None = None, usenetrc: bool = True) -> None: ... + def starttls(self, context: ssl.SSLContext | None = None) -> None: ... class NNTP_SSL(NNTP): ssl_context: ssl.SSLContext | None @@ -115,11 +115,11 @@ class NNTP_SSL(NNTP): def __init__( self, host: str, - port: int = ..., - user: str | None = ..., - password: str | None = ..., - ssl_context: ssl.SSLContext | None = ..., - readermode: bool | None = ..., - usenetrc: bool = ..., + port: int = 563, + user: str | None = None, + password: str | None = None, + ssl_context: ssl.SSLContext | None = None, + readermode: bool | None = None, + usenetrc: bool = False, timeout: float = ..., ) -> None: ... diff --git a/mypy/typeshed/stdlib/ntpath.pyi b/mypy/typeshed/stdlib/ntpath.pyi index 0cd3e446475b..f1fa137c6d88 100644 --- a/mypy/typeshed/stdlib/ntpath.pyi +++ b/mypy/typeshed/stdlib/ntpath.pyi @@ -101,9 +101,9 @@ def join(__path: BytesPath, *paths: BytesPath) -> bytes: ... if sys.platform == "win32": if sys.version_info >= (3, 10): @overload - def realpath(path: PathLike[AnyStr], *, strict: bool = ...) -> AnyStr: ... + def realpath(path: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... @overload - def realpath(path: AnyStr, *, strict: bool = ...) -> AnyStr: ... + def realpath(path: AnyStr, *, strict: bool = False) -> AnyStr: ... else: @overload def realpath(path: PathLike[AnyStr]) -> AnyStr: ... diff --git a/mypy/typeshed/stdlib/numbers.pyi b/mypy/typeshed/stdlib/numbers.pyi index d94ae7faf890..55f21041ae44 100644 --- a/mypy/typeshed/stdlib/numbers.pyi +++ b/mypy/typeshed/stdlib/numbers.pyi @@ -60,7 +60,7 @@ class Real(Complex, SupportsFloat): def __ceil__(self) -> int: ... @abstractmethod @overload - def __round__(self, ndigits: None = ...) -> int: ... + def __round__(self, ndigits: None = None) -> int: ... @abstractmethod @overload def __round__(self, ndigits: int) -> Any: ... @@ -99,7 +99,7 @@ class Integral(Rational): def __int__(self) -> int: ... def __index__(self) -> int: ... @abstractmethod - def __pow__(self, exponent: Any, modulus: Any | None = ...) -> Any: ... + def __pow__(self, exponent: Any, modulus: Any | None = None) -> Any: ... @abstractmethod def __lshift__(self, other: Any) -> Any: ... 
@abstractmethod diff --git a/mypy/typeshed/stdlib/opcode.pyi b/mypy/typeshed/stdlib/opcode.pyi index 402dbb74cf58..1232454e71ea 100644 --- a/mypy/typeshed/stdlib/opcode.pyi +++ b/mypy/typeshed/stdlib/opcode.pyi @@ -49,9 +49,9 @@ HAVE_ARGUMENT: Literal[90] EXTENDED_ARG: Literal[144] if sys.version_info >= (3, 8): - def stack_effect(__opcode: int, __oparg: int | None = ..., *, jump: bool | None = ...) -> int: ... + def stack_effect(__opcode: int, __oparg: int | None = None, *, jump: bool | None = None) -> int: ... else: - def stack_effect(__opcode: int, __oparg: int | None = ...) -> int: ... + def stack_effect(__opcode: int, __oparg: int | None = None) -> int: ... hasnargs: list[int] diff --git a/mypy/typeshed/stdlib/operator.pyi b/mypy/typeshed/stdlib/operator.pyi index c3fc4b0a8503..a0e5df7977da 100644 --- a/mypy/typeshed/stdlib/operator.pyi +++ b/mypy/typeshed/stdlib/operator.pyi @@ -1,5 +1,4 @@ import sys - from _operator import * __all__ = [ diff --git a/mypy/typeshed/stdlib/optparse.pyi b/mypy/typeshed/stdlib/optparse.pyi index b571ff0680b7..a8c1c4cfb93e 100644 --- a/mypy/typeshed/stdlib/optparse.pyi +++ b/mypy/typeshed/stdlib/optparse.pyi @@ -42,7 +42,6 @@ class AmbiguousOptionError(BadOptionError): def __init__(self, opt_str: str, possibilities: Sequence[str]) -> None: ... class OptionError(OptParseError): - msg: str option_id: str def __init__(self, msg: str, option: Option) -> None: ... @@ -83,14 +82,14 @@ class HelpFormatter: class IndentedHelpFormatter(HelpFormatter): def __init__( - self, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ..., short_first: int = ... + self, indent_increment: int = 2, max_help_position: int = 24, width: int | None = None, short_first: int = 1 ) -> None: ... def format_heading(self, heading: str) -> str: ... def format_usage(self, usage: str) -> str: ... class TitledHelpFormatter(HelpFormatter): def __init__( - self, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ..., short_first: int = ... + self, indent_increment: int = 0, max_help_position: int = 24, width: int | None = None, short_first: int = 0 ) -> None: ... def format_heading(self, heading: str) -> str: ... def format_usage(self, usage: str) -> str: ... @@ -168,18 +167,18 @@ class OptionGroup(OptionContainer): option_list: list[Option] parser: OptionParser title: str - def __init__(self, parser: OptionParser, title: str, description: str | None = ...) -> None: ... + def __init__(self, parser: OptionParser, title: str, description: str | None = None) -> None: ... def _create_option_list(self) -> None: ... def set_title(self, title: str) -> None: ... class Values: - def __init__(self, defaults: Mapping[str, Any] | None = ...) -> None: ... + def __init__(self, defaults: Mapping[str, Any] | None = None) -> None: ... def _update(self, dict: Mapping[str, Any], mode: Any) -> None: ... def _update_careful(self, dict: Mapping[str, Any]) -> None: ... def _update_loose(self, dict: Mapping[str, Any]) -> None: ... def ensure_value(self, attr: str, value: Any) -> Any: ... - def read_file(self, filename: str, mode: str = ...) -> None: ... - def read_module(self, modname: str, mode: str = ...) -> None: ... + def read_file(self, filename: str, mode: str = "careful") -> None: ... + def read_module(self, modname: str, mode: str = "careful") -> None: ... def __getattr__(self, name: str) -> Any: ... def __setattr__(self, __name: str, __value: Any) -> None: ... def __eq__(self, other: object) -> bool: ... 
@@ -200,16 +199,16 @@ class OptionParser(OptionContainer): version: str def __init__( self, - usage: str | None = ..., - option_list: Iterable[Option] | None = ..., + usage: str | None = None, + option_list: Iterable[Option] | None = None, option_class: type[Option] = ..., - version: str | None = ..., - conflict_handler: str = ..., - description: str | None = ..., - formatter: HelpFormatter | None = ..., - add_help_option: bool = ..., - prog: str | None = ..., - epilog: str | None = ..., + version: str | None = None, + conflict_handler: str = "error", + description: str | None = None, + formatter: HelpFormatter | None = None, + add_help_option: bool = True, + prog: str | None = None, + epilog: str | None = None, ) -> None: ... def _add_help_option(self) -> None: ... def _add_version_option(self) -> None: ... @@ -218,7 +217,7 @@ class OptionParser(OptionContainer): def _get_args(self, args: Iterable[Any]) -> list[Any]: ... def _init_parsing_state(self) -> None: ... def _match_long_opt(self, opt: str) -> str: ... - def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = ...) -> None: ... + def _populate_option_list(self, option_list: Iterable[Option], add_help: bool = True) -> None: ... def _process_args(self, largs: list[Any], rargs: list[Any], values: Values) -> None: ... def _process_long_opt(self, rargs: list[Any], values: Any) -> None: ... def _process_short_opts(self, rargs: list[Any], values: Any) -> None: ... @@ -230,23 +229,23 @@ class OptionParser(OptionContainer): def disable_interspersed_args(self) -> None: ... def enable_interspersed_args(self) -> None: ... def error(self, msg: str) -> None: ... - def exit(self, status: int = ..., msg: str | None = ...) -> None: ... + def exit(self, status: int = 0, msg: str | None = None) -> None: ... def expand_prog_name(self, s: str | None) -> Any: ... def format_epilog(self, formatter: HelpFormatter) -> Any: ... - def format_help(self, formatter: HelpFormatter | None = ...) -> str: ... - def format_option_help(self, formatter: HelpFormatter | None = ...) -> str: ... + def format_help(self, formatter: HelpFormatter | None = None) -> str: ... + def format_option_help(self, formatter: HelpFormatter | None = None) -> str: ... def get_default_values(self) -> Values: ... def get_option_group(self, opt_str: str) -> Any: ... def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... @overload - def parse_args(self, args: None = ..., values: Values | None = ...) -> tuple[Values, list[str]]: ... + def parse_args(self, args: None = None, values: Values | None = None) -> tuple[Values, list[str]]: ... @overload - def parse_args(self, args: Sequence[AnyStr], values: Values | None = ...) -> tuple[Values, list[AnyStr]]: ... - def print_usage(self, file: IO[str] | None = ...) -> None: ... - def print_help(self, file: IO[str] | None = ...) -> None: ... - def print_version(self, file: IO[str] | None = ...) -> None: ... + def parse_args(self, args: Sequence[AnyStr], values: Values | None = None) -> tuple[Values, list[AnyStr]]: ... + def print_usage(self, file: IO[str] | None = None) -> None: ... + def print_help(self, file: IO[str] | None = None) -> None: ... + def print_version(self, file: IO[str] | None = None) -> None: ... def set_default(self, dest: Any, value: Any) -> None: ... def set_defaults(self, **kwargs: Any) -> None: ... def set_process_default_values(self, process: Any) -> None: ... 
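The optparse.pyi changes above mostly surface the documented defaults (conflict_handler="error", add_help_option=True, Values.read_file mode="careful", exit(status=0), and so on). A small sketch that leans on those defaults (illustrative only; the -n/--num option is a made-up example):

```python
from optparse import OptionParser

parser = OptionParser(usage="%prog [options]")           # prog/version/description default to None
parser.add_option("-n", "--num", type="int", default=1)  # hypothetical option for the example
opts, args = parser.parse_args(["-n", "3"])              # parse_args(args=None) would read sys.argv[1:]
print(opts.num, args)                                    # -> 3 []
```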
diff --git a/mypy/typeshed/stdlib/os/__init__.pyi b/mypy/typeshed/stdlib/os/__init__.pyi index e3d428555462..595b78789c6a 100644 --- a/mypy/typeshed/stdlib/os/__init__.pyi +++ b/mypy/typeshed/stdlib/os/__init__.pyi @@ -3,15 +3,19 @@ from _typeshed import ( AnyStr_co, BytesPath, FileDescriptorLike, + FileDescriptorOrPath, GenericPath, OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, - Self, + ReadableBuffer, StrOrBytesPath, StrPath, + SupportsLenAndGetItem, + Unused, + WriteableBuffer, structseq, ) from abc import abstractmethod @@ -21,7 +25,7 @@ from contextlib import AbstractContextManager from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper as _TextIOWrapper from subprocess import Popen from typing import IO, Any, AnyStr, BinaryIO, Generic, NoReturn, Protocol, TypeVar, overload, runtime_checkable -from typing_extensions import Final, Literal, TypeAlias, final +from typing_extensions import Final, Literal, Self, TypeAlias, final from . import path as _path @@ -240,9 +244,9 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): # overloading MutableMapping.update in stdlib/typing.pyi # The type: ignore is needed due to incompatible __or__/__ior__ signatures @overload # type: ignore[misc] - def __ior__(self: Self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + def __ior__(self, other: Mapping[AnyStr, AnyStr]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... environ: _Environ[str] if sys.platform != "win32": @@ -362,14 +366,11 @@ class PathLike(Protocol[AnyStr_co]): def __fspath__(self) -> AnyStr_co: ... @overload -def listdir(path: StrPath | None = ...) -> list[str]: ... +def listdir(path: StrPath | None = None) -> list[str]: ... @overload def listdir(path: BytesPath) -> list[bytes]: ... @overload def listdir(path: int) -> list[str]: ... - -_FdOrAnyPath: TypeAlias = int | StrOrBytesPath - @final class DirEntry(Generic[AnyStr]): # This is what the scandir iterator yields @@ -380,10 +381,10 @@ class DirEntry(Generic[AnyStr]): @property def path(self) -> AnyStr: ... def inode(self) -> int: ... - def is_dir(self, *, follow_symlinks: bool = ...) -> bool: ... - def is_file(self, *, follow_symlinks: bool = ...) -> bool: ... + def is_dir(self, *, follow_symlinks: bool = True) -> bool: ... + def is_file(self, *, follow_symlinks: bool = True) -> bool: ... def is_symlink(self) -> bool: ... - def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... def __fspath__(self) -> AnyStr: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -435,7 +436,7 @@ def fspath(path: str) -> str: ... def fspath(path: bytes) -> bytes: ... @overload def fspath(path: PathLike[AnyStr]) -> AnyStr: ... -def get_exec_path(env: Mapping[str, str] | None = ...) -> list[str]: ... +def get_exec_path(env: Mapping[str, str] | None = None) -> list[str]: ... def getlogin() -> str: ... def getpid() -> int: ... def getppid() -> int: ... @@ -500,20 +501,23 @@ if sys.platform != "win32": def getenvb(key: bytes) -> bytes | None: ... @overload def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(__name: StrOrBytesPath, __value: StrOrBytesPath) -> None: ... + def unsetenv(__name: StrOrBytesPath) -> None: ... -def putenv(__name: bytes | str, __value: bytes | str) -> None: ... 
+else: + def putenv(__name: str, __value: str) -> None: ... -if sys.platform != "win32" or sys.version_info >= (3, 9): - def unsetenv(__name: bytes | str) -> None: ... + if sys.version_info >= (3, 9): + def unsetenv(__name: str) -> None: ... _Opener: TypeAlias = Callable[[str, int], int] @overload def fdopen( fd: int, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, errors: str | None = ..., newline: str | None = ..., closefd: bool = ..., @@ -524,9 +528,9 @@ def fdopen( fd: int, mode: OpenBinaryMode, buffering: Literal[0], - encoding: None = ..., - errors: None = ..., - newline: None = ..., + encoding: None = None, + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> FileIO: ... @@ -534,10 +538,10 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedRandom: ... @@ -545,10 +549,10 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedWriter: ... @@ -556,10 +560,10 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BufferedReader: ... @@ -567,10 +571,10 @@ def fdopen( def fdopen( fd: int, mode: OpenBinaryMode, - buffering: int = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, closefd: bool = ..., opener: _Opener | None = ..., ) -> BinaryIO: ... @@ -578,8 +582,8 @@ def fdopen( def fdopen( fd: int, mode: str, - buffering: int = ..., - encoding: str | None = ..., + buffering: int = -1, + encoding: str | None = None, errors: str | None = ..., newline: str | None = ..., closefd: bool = ..., @@ -589,7 +593,7 @@ def close(fd: int) -> None: ... def closerange(__fd_low: int, __fd_high: int) -> None: ... def device_encoding(fd: int) -> str | None: ... def dup(__fd: int) -> int: ... -def dup2(fd: int, fd2: int, inheritable: bool = ...) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = True) -> int: ... def fstat(fd: int) -> stat_result: ... def ftruncate(__fd: int, __length: int) -> None: ... def fsync(fd: FileDescriptorLike) -> None: ... @@ -599,12 +603,11 @@ if sys.platform != "win32" and sys.version_info >= (3, 11): def login_tty(__fd: int) -> None: ... def lseek(__fd: int, __position: int, __how: int) -> int: ... -def open(path: StrOrBytesPath, flags: int, mode: int = ..., *, dir_fd: int | None = ...) -> int: ... +def open(path: StrOrBytesPath, flags: int, mode: int = 0o777, *, dir_fd: int | None = None) -> int: ... def pipe() -> tuple[int, int]: ... def read(__fd: int, __length: int) -> bytes: ... 
if sys.platform != "win32": - # Unix only def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... def fpathconf(__fd: int, __name: str | int) -> int: ... @@ -620,12 +623,13 @@ if sys.platform != "win32": def posix_fadvise(__fd: int, __offset: int, __length: int, __advice: int) -> None: ... def pread(__fd: int, __length: int, __offset: int) -> bytes: ... - def pwrite(__fd: int, __buffer: bytes, __offset: int) -> int: ... + def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... + # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not + def preadv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer], __offset: int, __flags: int = 0) -> int: ... + def pwritev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer], __offset: int, __flags: int = 0) -> int: ... if sys.platform != "darwin": if sys.version_info >= (3, 10): RWF_APPEND: int # docs say available on 3.7+, stubtest says otherwise - def preadv(__fd: int, __buffers: Iterable[bytes], __offset: int, __flags: int = ...) -> int: ... - def pwritev(__fd: int, __buffers: Iterable[bytes], __offset: int, __flags: int = ...) -> int: ... RWF_DSYNC: int RWF_SYNC: int RWF_HIPRI: int @@ -638,12 +642,12 @@ if sys.platform != "win32": in_fd: int, offset: int, count: int, - headers: Sequence[bytes] = ..., - trailers: Sequence[bytes] = ..., - flags: int = ..., + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., + flags: int = 0, ) -> int: ... # FreeBSD and Mac OS X only - def readv(__fd: int, __buffers: Sequence[bytearray]) -> int: ... - def writev(__fd: int, __buffers: Sequence[bytes]) -> int: ... + def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... + def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ... @final class terminal_size(structseq[int], tuple[int, int]): @@ -668,118 +672,134 @@ if sys.platform != "win32": def tcsetpgrp(__fd: int, __pgid: int) -> None: ... def ttyname(__fd: int) -> str: ... -def write(__fd: int, __data: bytes) -> int: ... +def write(__fd: int, __data: ReadableBuffer) -> int: ... def access( - path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., effective_ids: bool = ..., follow_symlinks: bool = ... + path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, effective_ids: bool = False, follow_symlinks: bool = True ) -> bool: ... -def chdir(path: _FdOrAnyPath) -> None: ... +def chdir(path: FileDescriptorOrPath) -> None: ... if sys.platform != "win32": def fchdir(fd: FileDescriptorLike) -> None: ... def getcwd() -> str: ... def getcwdb() -> bytes: ... -def chmod(path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... +def chmod(path: FileDescriptorOrPath, mode: int, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> None: ... if sys.platform != "win32" and sys.platform != "linux": - def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix + def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = True) -> None: ... # some flavors of Unix def lchflags(path: StrOrBytesPath, flags: int) -> None: ... def lchmod(path: StrOrBytesPath, mode: int) -> None: ... if sys.platform != "win32": def chroot(path: StrOrBytesPath) -> None: ... - def chown(path: _FdOrAnyPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... 
+ def chown( + path: FileDescriptorOrPath, uid: int, gid: int, *, dir_fd: int | None = None, follow_symlinks: bool = True + ) -> None: ... def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... def link( src: StrOrBytesPath, dst: StrOrBytesPath, *, - src_dir_fd: int | None = ..., - dst_dir_fd: int | None = ..., - follow_symlinks: bool = ..., + src_dir_fd: int | None = None, + dst_dir_fd: int | None = None, + follow_symlinks: bool = True, ) -> None: ... -def lstat(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> stat_result: ... -def mkdir(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = None) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = 0o777, *, dir_fd: int | None = None) -> None: ... if sys.platform != "win32": - def mkfifo(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... # Unix only + def mkfifo(path: StrOrBytesPath, mode: int = 0o666, *, dir_fd: int | None = None) -> None: ... # Unix only -def makedirs(name: StrOrBytesPath, mode: int = ..., exist_ok: bool = ...) -> None: ... +def makedirs(name: StrOrBytesPath, mode: int = 0o777, exist_ok: bool = False) -> None: ... if sys.platform != "win32": - def mknod(path: StrOrBytesPath, mode: int = ..., device: int = ..., *, dir_fd: int | None = ...) -> None: ... + def mknod(path: StrOrBytesPath, mode: int = 0o600, device: int = 0, *, dir_fd: int | None = None) -> None: ... def major(__device: int) -> int: ... def minor(__device: int) -> int: ... def makedev(__major: int, __minor: int) -> int: ... - def pathconf(path: _FdOrAnyPath, name: str | int) -> int: ... # Unix only + def pathconf(path: FileDescriptorOrPath, name: str | int) -> int: ... # Unix only -def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... -def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = None) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... def removedirs(name: StrOrBytesPath) -> None: ... -def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... +def rename(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None) -> None: ... def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... -def replace(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... -def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def replace( + src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... class _ScandirIterator(Iterator[DirEntry[AnyStr]], AbstractContextManager[_ScandirIterator[AnyStr]]): def __next__(self) -> DirEntry[AnyStr]: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def close(self) -> None: ... @overload -def scandir(path: None = ...) -> _ScandirIterator[str]: ... +def scandir(path: None = None) -> _ScandirIterator[str]: ... @overload def scandir(path: int) -> _ScandirIterator[str]: ... @overload def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... -def stat(path: _FdOrAnyPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) 
-> stat_result: ... +def stat(path: FileDescriptorOrPath, *, dir_fd: int | None = None, follow_symlinks: bool = True) -> stat_result: ... if sys.platform != "win32": - def statvfs(path: _FdOrAnyPath) -> statvfs_result: ... # Unix only + def statvfs(path: FileDescriptorOrPath) -> statvfs_result: ... # Unix only -def symlink(src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = ..., *, dir_fd: int | None = ...) -> None: ... +def symlink( + src: StrOrBytesPath, dst: StrOrBytesPath, target_is_directory: bool = False, *, dir_fd: int | None = None +) -> None: ... if sys.platform != "win32": def sync() -> None: ... # Unix only -def truncate(path: _FdOrAnyPath, length: int) -> None: ... # Unix only up to version 3.4 -def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def truncate(path: FileDescriptorOrPath, length: int) -> None: ... # Unix only up to version 3.4 +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = None) -> None: ... def utime( - path: _FdOrAnyPath, - times: tuple[int, int] | tuple[float, float] | None = ..., + path: FileDescriptorOrPath, + times: tuple[int, int] | tuple[float, float] | None = None, *, ns: tuple[int, int] = ..., - dir_fd: int | None = ..., - follow_symlinks: bool = ..., + dir_fd: int | None = None, + follow_symlinks: bool = True, ) -> None: ... _OnError: TypeAlias = Callable[[OSError], object] def walk( - top: GenericPath[AnyStr], topdown: bool = ..., onerror: _OnError | None = ..., followlinks: bool = ... + top: GenericPath[AnyStr], topdown: bool = True, onerror: _OnError | None = None, followlinks: bool = False ) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... if sys.platform != "win32": @overload def fwalk( - top: StrPath = ..., - topdown: bool = ..., - onerror: _OnError | None = ..., + top: StrPath = ".", + topdown: bool = True, + onerror: _OnError | None = None, *, - follow_symlinks: bool = ..., - dir_fd: int | None = ..., + follow_symlinks: bool = False, + dir_fd: int | None = None, ) -> Iterator[tuple[str, list[str], list[str], int]]: ... @overload def fwalk( - top: bytes, topdown: bool = ..., onerror: _OnError | None = ..., *, follow_symlinks: bool = ..., dir_fd: int | None = ... + top: BytesPath, + topdown: bool = True, + onerror: _OnError | None = None, + *, + follow_symlinks: bool = False, + dir_fd: int | None = None, ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... if sys.platform == "linux": - def getxattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... - def listxattr(path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... - def removexattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... + def getxattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> bytes: ... + def listxattr(path: FileDescriptorOrPath | None = None, *, follow_symlinks: bool = True) -> list[str]: ... + def removexattr(path: FileDescriptorOrPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... def setxattr( - path: _FdOrAnyPath, attribute: StrOrBytesPath, value: bytes, flags: int = ..., *, follow_symlinks: bool = ... + path: FileDescriptorOrPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = 0, + *, + follow_symlinks: bool = True, ) -> None: ... def abort() -> NoReturn: ... 
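The os hunks above widen the write-side parameters from plain bytes to ReadableBuffer and swap the private _FdOrAnyPath alias for _typeshed.FileDescriptorOrPath. A minimal sketch of what those annotations accept at a call site (the temp-file handling is purely illustrative, not part of the diff):

```python
import os
import tempfile

fd, path = tempfile.mkstemp()
try:
    # ReadableBuffer: any bytes-like object is accepted, not just bytes.
    os.write(fd, b"plain bytes")
    os.write(fd, bytearray(b"a bytearray"))
    os.write(fd, memoryview(b"a memoryview"))

    # FileDescriptorOrPath: an open file descriptor works wherever a path does.
    print(os.stat(fd).st_size, os.stat(path).st_size)
finally:
    os.close(fd)
    os.unlink(path)
```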
@@ -807,10 +827,14 @@ _ExecVArgs: TypeAlias = ( | list[str | PathLike[Any]] | list[bytes | str | PathLike[Any]] ) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. _ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... -def execve(path: _FdOrAnyPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execve(path: FileDescriptorOrPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... def _exit(status: int) -> NoReturn: ... @@ -829,7 +853,7 @@ class _wrap_close(_TextIOWrapper): def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... def close(self) -> int | None: ... # type: ignore[override] -def popen(cmd: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... +def popen(cmd: str, mode: str = "r", buffering: int = -1) -> _wrap_close: ... def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... def spawnle(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise sig @@ -861,7 +885,7 @@ def times() -> times_result: ... def waitpid(__pid: int, __options: int) -> tuple[int, int]: ... if sys.platform == "win32": - def startfile(path: StrOrBytesPath, operation: str | None = ...) -> None: ... + def startfile(path: StrOrBytesPath, operation: str | None = None) -> None: ... else: def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... @@ -933,7 +957,7 @@ if sys.platform != "win32": class sched_param(structseq[int], tuple[int]): if sys.version_info >= (3, 10): __match_args__: Final = ("sched_priority",) - def __new__(cls: type[Self], sched_priority: int) -> Self: ... + def __new__(cls, sched_priority: int) -> Self: ... @property def sched_priority(self) -> int: ... @@ -958,7 +982,7 @@ if sys.platform != "win32": def sysconf(__name: str | int) -> int: ... if sys.platform == "linux": - def getrandom(size: int, flags: int = ...) -> bytes: ... + def getrandom(size: int, flags: int = 0) -> bytes: ... def urandom(__size: int) -> bytes: ... @@ -976,8 +1000,8 @@ if sys.version_info >= (3, 8): path: str | None def __init__(self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object]) -> None: ... def close(self) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def add_dll_directory(path: str) -> _AddedDllDirectory: ... 
if sys.platform == "linux": diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 2a0f1760cae5..114678ed574d 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -5,15 +5,16 @@ from _typeshed import ( OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode, - Self, + ReadableBuffer, + StrOrBytesPath, StrPath, ) -from collections.abc import Generator, Sequence +from collections.abc import Callable, Generator, Iterator, Sequence from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import TracebackType from typing import IO, Any, BinaryIO, overload -from typing_extensions import Literal +from typing_extensions import Literal, Self if sys.version_info >= (3, 9): from types import GenericAlias @@ -37,15 +38,15 @@ class PurePath(PathLike[str]): def suffixes(self) -> list[str]: ... @property def stem(self) -> str: ... - def __new__(cls: type[Self], *args: StrPath) -> Self: ... + def __new__(cls, *args: StrPath) -> Self: ... def __eq__(self, other: object) -> bool: ... def __fspath__(self) -> str: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... def __ge__(self, other: PurePath) -> bool: ... - def __truediv__(self: Self, key: StrPath) -> Self: ... - def __rtruediv__(self: Self, key: StrPath) -> Self: ... + def __truediv__(self, key: StrPath) -> Self: ... + def __rtruediv__(self, key: StrPath) -> Self: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... @@ -55,17 +56,17 @@ class PurePath(PathLike[str]): def is_relative_to(self, *other: StrPath) -> bool: ... def match(self, path_pattern: str) -> bool: ... - def relative_to(self: Self, *other: StrPath) -> Self: ... - def with_name(self: Self, name: str) -> Self: ... + def relative_to(self, *other: StrPath) -> Self: ... + def with_name(self, name: str) -> Self: ... if sys.version_info >= (3, 9): - def with_stem(self: Self, stem: str) -> Self: ... + def with_stem(self, stem: str) -> Self: ... - def with_suffix(self: Self, suffix: str) -> Self: ... - def joinpath(self: Self, *other: StrPath) -> Self: ... + def with_suffix(self, suffix: str) -> Self: ... + def joinpath(self, *other: StrPath) -> Self: ... @property - def parents(self: Self) -> Sequence[Self]: ... + def parents(self) -> Sequence[Self]: ... @property - def parent(self: Self) -> Self: ... + def parent(self) -> Self: ... if sys.version_info >= (3, 9) and sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... @@ -73,20 +74,20 @@ class PurePosixPath(PurePath): ... class PureWindowsPath(PurePath): ... class Path(PurePath): - def __new__(cls: type[Self], *args: StrPath, **kwargs: Any) -> Self: ... - def __enter__(self: Self) -> Self: ... + def __new__(cls, *args: StrPath, **kwargs: Any) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @classmethod - def cwd(cls: type[Self]) -> Self: ... + def cwd(cls) -> Self: ... if sys.version_info >= (3, 10): - def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... - def chmod(self, mode: int, *, follow_symlinks: bool = ...) -> None: ... + def stat(self, *, follow_symlinks: bool = True) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... 
else: def stat(self) -> stat_result: ... def chmod(self, mode: int) -> None: ... def exists(self) -> bool: ... - def glob(self: Self, pattern: str) -> Generator[Self, None, None]: ... + def glob(self, pattern: str) -> Generator[Self, None, None]: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... def is_symlink(self) -> bool: ... @@ -94,64 +95,64 @@ class Path(PurePath): def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... - def iterdir(self: Self) -> Generator[Self, None, None]: ... + def iterdir(self) -> Generator[Self, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> stat_result: ... - def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... + def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... # Adapted from builtins.open # Text mode: always returns a TextIOWrapper # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. @overload def open( self, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIOWrapper: ... # Unbuffered binary mode: returns a FileIO @overload def open( - self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = None, errors: None = None, newline: None = None ) -> FileIO: ... # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter @overload def open( self, mode: OpenBinaryModeUpdating, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedRandom: ... @overload def open( self, mode: OpenBinaryModeWriting, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedWriter: ... @overload def open( self, mode: OpenBinaryModeReading, - buffering: Literal[-1, 1] = ..., - encoding: None = ..., - errors: None = ..., - newline: None = ..., + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, ) -> BufferedReader: ... # Buffering cannot be determined: fall back to BinaryIO @overload def open( - self, mode: OpenBinaryMode, buffering: int = ..., encoding: None = ..., errors: None = ..., newline: None = ... + self, mode: OpenBinaryMode, buffering: int = -1, encoding: None = None, errors: None = None, newline: None = None ) -> BinaryIO: ... # Fallback if mode is not specified @overload def open( - self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + self, mode: str, buffering: int = -1, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> IO[Any]: ... if sys.platform != "win32": # These methods do "exist" on Windows, but they always raise NotImplementedError, @@ -161,43 +162,47 @@ class Path(PurePath): def is_mount(self) -> bool: ... if sys.version_info >= (3, 9): - def readlink(self: Self) -> Self: ... 
+ def readlink(self) -> Self: ... if sys.version_info >= (3, 8): - def rename(self: Self, target: str | PurePath) -> Self: ... - def replace(self: Self, target: str | PurePath) -> Self: ... + def rename(self, target: str | PurePath) -> Self: ... + def replace(self, target: str | PurePath) -> Self: ... else: def rename(self, target: str | PurePath) -> None: ... def replace(self, target: str | PurePath) -> None: ... - def resolve(self: Self, strict: bool = ...) -> Self: ... - def rglob(self: Self, pattern: str) -> Generator[Self, None, None]: ... + def resolve(self, strict: bool = False) -> Self: ... + def rglob(self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... - def symlink_to(self, target: str | Path, target_is_directory: bool = ...) -> None: ... + def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... if sys.version_info >= (3, 10): def hardlink_to(self, target: str | Path) -> None: ... - def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... + def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... if sys.version_info >= (3, 8): - def unlink(self, missing_ok: bool = ...) -> None: ... + def unlink(self, missing_ok: bool = False) -> None: ... else: def unlink(self) -> None: ... @classmethod - def home(cls: type[Self]) -> Self: ... - def absolute(self: Self) -> Self: ... - def expanduser(self: Self) -> Self: ... + def home(cls) -> Self: ... + def absolute(self) -> Self: ... + def expanduser(self) -> Self: ... def read_bytes(self) -> bytes: ... - def read_text(self, encoding: str | None = ..., errors: str | None = ...) -> str: ... - def samefile(self, other_path: str | bytes | int | Path) -> bool: ... - def write_bytes(self, data: bytes) -> int: ... + def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + def samefile(self, other_path: StrPath) -> bool: ... + def write_bytes(self, data: ReadableBuffer) -> int: ... if sys.version_info >= (3, 10): def write_text( - self, data: str, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + self, data: str, encoding: str | None = None, errors: str | None = None, newline: str | None = None ) -> int: ... else: - def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... - if sys.version_info >= (3, 8): - def link_to(self, target: StrPath | bytes) -> None: ... + def write_text(self, data: str, encoding: str | None = None, errors: str | None = None) -> int: ... + if sys.version_info >= (3, 8) and sys.version_info < (3, 12): + def link_to(self, target: StrOrBytesPath) -> None: ... + if sys.version_info >= (3, 12): + def walk( + self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ... + ) -> Iterator[tuple[Self, list[str], list[str]]]: ... class PosixPath(Path, PurePosixPath): ... class WindowsPath(Path, PureWindowsPath): ... 
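With the pathlib methods now returning Self and write_bytes taking ReadableBuffer, chained Path calls keep their concrete type and any buffer can be written. A small sketch under those annotations (the file names are made up; Path.walk from the last hunk only exists on 3.12+ and is not used here):

```python
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    # joinpath()/with_suffix() are typed to return the same Path subclass.
    target = Path(tmp).joinpath("data").with_suffix(".bin")
    # write_bytes() accepts any ReadableBuffer, e.g. a memoryview.
    target.write_bytes(memoryview(b"\x00\x01\x02"))
    assert target.read_bytes() == b"\x00\x01\x02"
    # samefile() now takes StrPath rather than str | bytes | int | Path.
    assert target.samefile(str(target))
```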
diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index 6e95dcff6ee2..e2871bb54fa0 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -1,13 +1,12 @@ import signal import sys -from _typeshed import Self from bdb import Bdb from cmd import Cmd from collections.abc import Callable, Iterable, Mapping, Sequence from inspect import _SourceObjectType from types import CodeType, FrameType, TracebackType from typing import IO, Any, ClassVar, TypeVar -from typing_extensions import ParamSpec +from typing_extensions import ParamSpec, Self __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] @@ -18,12 +17,12 @@ line_prefix: str # undocumented class Restart(Exception): ... -def run(statement: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... -def runeval(expression: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> Any: ... +def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ... +def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ... def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... -def set_trace(*, header: str | None = ...) -> None: ... -def post_mortem(t: TracebackType | None = ...) -> None: ... +def set_trace(*, header: str | None = None) -> None: ... +def post_mortem(t: TracebackType | None = None) -> None: ... def pm() -> None: ... class Pdb(Bdb, Cmd): @@ -47,12 +46,12 @@ class Pdb(Bdb, Cmd): curframe_locals: Mapping[str, Any] def __init__( self, - completekey: str = ..., - stdin: IO[str] | None = ..., - stdout: IO[str] | None = ..., - skip: Iterable[str] | None = ..., - nosigint: bool = ..., - readrc: bool = ..., + completekey: str = "tab", + stdin: IO[str] | None = None, + stdout: IO[str] | None = None, + skip: Iterable[str] | None = None, + nosigint: bool = False, + readrc: bool = True, ) -> None: ... def forget(self) -> None: ... def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ... @@ -66,7 +65,7 @@ class Pdb(Bdb, Cmd): def checkline(self, filename: str, lineno: int) -> int: ... def _getval(self, arg: str) -> object: ... def print_stack_trace(self) -> None: ... - def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = ...) -> None: ... + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ... def lookupmodule(self, filename: str) -> str | None: ... if sys.version_info < (3, 11): def _runscript(self, filename: str) -> None: ... @@ -127,9 +126,9 @@ class Pdb(Bdb, Cmd): def message(self, msg: str) -> None: ... def error(self, msg: str) -> None: ... def _select_frame(self, number: int) -> None: ... - def _getval_except(self, arg: str, frame: FrameType | None = ...) -> object: ... + def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... def _print_lines( - self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = ... + self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = None ) -> None: ... def _cmdloop(self) -> None: ... def do_display(self, arg: str) -> bool | None: ... 
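The Pdb constructor defaults are now spelled out above (completekey="tab", nosigint=False, readrc=True, None for the streams). A quick sketch showing that instantiating the class with those explicit values only configures it; the StringIO streams are just stand-ins:

```python
import io
import pdb

debugger = pdb.Pdb(
    completekey="tab",
    stdin=io.StringIO(),
    stdout=io.StringIO(),
    skip=None,
    nosigint=False,
    readrc=True,
)
# Construction does not start tracing; that only happens via set_trace()/run().
assert debugger.prompt == "(Pdb) "
```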
@@ -173,4 +172,4 @@ def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): - def __repr__(self: Self) -> Self: ... + def __repr__(self) -> Self: ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index 9a94e9eced3c..57c4cb03e484 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import ReadableBuffer +from _typeshed import ReadableBuffer, SupportsWrite from collections.abc import Callable, Iterable, Iterator, Mapping -from typing import Any, ClassVar, Protocol, SupportsBytes, Union +from typing import Any, ClassVar, Protocol, SupportsBytes from typing_extensions import SupportsIndex, TypeAlias, final __all__ = [ @@ -97,9 +97,6 @@ class _ReadableFileobj(Protocol): def read(self, __n: int) -> bytes: ... def readline(self) -> bytes: ... -class _WritableFileobj(Protocol): - def write(self, __b: bytes) -> Any: ... - if sys.version_info >= (3, 8): @final class PickleBuffer: @@ -109,49 +106,49 @@ if sys.version_info >= (3, 8): _BufferCallback: TypeAlias = Callable[[PickleBuffer], Any] | None def dump( obj: Any, - file: _WritableFileobj, - protocol: int | None = ..., + file: SupportsWrite[bytes], + protocol: int | None = None, *, - fix_imports: bool = ..., - buffer_callback: _BufferCallback = ..., + fix_imports: bool = True, + buffer_callback: _BufferCallback = None, ) -> None: ... def dumps( - obj: Any, protocol: int | None = ..., *, fix_imports: bool = ..., buffer_callback: _BufferCallback = ... + obj: Any, protocol: int | None = None, *, fix_imports: bool = True, buffer_callback: _BufferCallback = None ) -> bytes: ... def load( file: _ReadableFileobj, *, - fix_imports: bool = ..., - encoding: str = ..., - errors: str = ..., + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", buffers: Iterable[Any] | None = ..., ) -> Any: ... def loads( __data: ReadableBuffer, *, - fix_imports: bool = ..., - encoding: str = ..., - errors: str = ..., + fix_imports: bool = True, + encoding: str = "ASCII", + errors: str = "strict", buffers: Iterable[Any] | None = ..., ) -> Any: ... else: - def dump(obj: Any, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... - def dumps(obj: Any, protocol: int | None = ..., *, fix_imports: bool = ...) -> bytes: ... - def load(file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... - def loads(data: ReadableBuffer, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... + def dump(obj: Any, file: SupportsWrite[bytes], protocol: int | None = None, *, fix_imports: bool = True) -> None: ... + def dumps(obj: Any, protocol: int | None = None, *, fix_imports: bool = True) -> bytes: ... + def load(file: _ReadableFileobj, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... + def loads(data: ReadableBuffer, *, fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict") -> Any: ... class PickleError(Exception): ... class PicklingError(PickleError): ... class UnpicklingError(PickleError): ... 
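pickle.dump() and Pickler now take _typeshed.SupportsWrite[bytes] instead of the removed _WritableFileobj protocol, so any object with a bytes-accepting write() method type-checks. A sketch with a purely illustrative collector class:

```python
import io
import pickle


class ChunkCollector:
    """Satisfies SupportsWrite[bytes]: all pickle needs is write()."""

    def __init__(self) -> None:
        self.chunks: list[bytes] = []

    def write(self, data: bytes) -> int:
        self.chunks.append(bytes(data))
        return len(data)


sink = ChunkCollector()
pickle.dump({"answer": 42}, sink, protocol=pickle.HIGHEST_PROTOCOL)
assert pickle.loads(b"".join(sink.chunks)) == {"answer": 42}

# io.BytesIO naturally satisfies the same protocol.
buf = io.BytesIO()
pickle.dump([1, 2, 3], buf)
assert pickle.loads(buf.getvalue()) == [1, 2, 3]
```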
-_ReducedType: TypeAlias = Union[ - str, - tuple[Callable[..., Any], tuple[Any, ...]], - tuple[Callable[..., Any], tuple[Any, ...], Any], - tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None], - tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None], -] +_ReducedType: TypeAlias = ( + str + | tuple[Callable[..., Any], tuple[Any, ...]] + | tuple[Callable[..., Any], tuple[Any, ...], Any] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None] + | tuple[Callable[..., Any], tuple[Any, ...], Any, Iterator[Any] | None, Iterator[Any] | None] +) class Pickler: fast: bool @@ -162,7 +159,7 @@ class Pickler: if sys.version_info >= (3, 8): def __init__( self, - file: _WritableFileobj, + file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., @@ -170,7 +167,7 @@ class Pickler: ) -> None: ... def reducer_override(self, obj: Any) -> Any: ... else: - def __init__(self, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def __init__(self, file: SupportsWrite[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dump(self, __obj: Any) -> None: ... def clear_memo(self) -> None: ... diff --git a/mypy/typeshed/stdlib/pickletools.pyi b/mypy/typeshed/stdlib/pickletools.pyi index c78848464237..542172814926 100644 --- a/mypy/typeshed/stdlib/pickletools.pyi +++ b/mypy/typeshed/stdlib/pickletools.pyi @@ -40,7 +40,7 @@ def read_uint8(f: IO[bytes]) -> int: ... uint8: ArgumentDescriptor -def read_stringnl(f: IO[bytes], decode: bool = ..., stripquotes: bool = ...) -> bytes | str: ... +def read_stringnl(f: IO[bytes], decode: bool = True, stripquotes: bool = True) -> bytes | str: ... stringnl: ArgumentDescriptor @@ -156,12 +156,12 @@ class OpcodeInfo: opcodes: list[OpcodeInfo] -def genops(pickle: bytes | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... -def optimize(p: bytes | IO[bytes]) -> bytes: ... +def genops(pickle: bytes | bytearray | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... +def optimize(p: bytes | bytearray | IO[bytes]) -> bytes: ... def dis( - pickle: bytes | IO[bytes], - out: IO[str] | None = ..., - memo: MutableMapping[int, Any] | None = ..., - indentlevel: int = ..., - annotate: int = ..., + pickle: bytes | bytearray | IO[bytes], + out: IO[str] | None = None, + memo: MutableMapping[int, Any] | None = None, + indentlevel: int = 4, + annotate: int = 0, ) -> None: ... diff --git a/mypy/typeshed/stdlib/pipes.pyi b/mypy/typeshed/stdlib/pipes.pyi index d6bbd7eafac3..fe680bfddf5f 100644 --- a/mypy/typeshed/stdlib/pipes.pyi +++ b/mypy/typeshed/stdlib/pipes.pyi @@ -3,7 +3,6 @@ import os __all__ = ["Template"] class Template: - def __init__(self) -> None: ... def reset(self) -> None: ... def clone(self) -> Template: ... def debug(self, flag: bool) -> None: ... diff --git a/mypy/typeshed/stdlib/pkgutil.pyi b/mypy/typeshed/stdlib/pkgutil.pyi index f91ab78ff35d..f9808c9e5de8 100644 --- a/mypy/typeshed/stdlib/pkgutil.pyi +++ b/mypy/typeshed/stdlib/pkgutil.pyi @@ -29,7 +29,7 @@ class ModuleInfo(NamedTuple): def extend_path(path: _PathT, name: str) -> _PathT: ... class ImpImporter: - def __init__(self, path: str | None = ...) -> None: ... + def __init__(self, path: str | None = None) -> None: ... class ImpLoader: def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... @@ -37,11 +37,11 @@ class ImpLoader: def find_loader(fullname: str) -> Loader | None: ... 
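The pickletools hunk above lets genops(), optimize() and dis() take a bytearray as well as bytes or a binary stream. A short round-trip sketch (the sample object is arbitrary):

```python
import io
import pickle
import pickletools

payload = bytearray(pickle.dumps({"x": 1}))
slimmed = pickletools.optimize(payload)            # still returns bytes
opcodes = [info.name for info, _arg, _pos in pickletools.genops(slimmed)]
assert opcodes[0] == "PROTO"                       # protocol 2+ pickles start with PROTO
pickletools.dis(slimmed, out=io.StringIO())        # send the disassembly somewhere quiet
```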
def get_importer(path_item: str) -> PathEntryFinder | None: ... def get_loader(module_or_name: str) -> Loader | None: ... -def iter_importers(fullname: str = ...) -> Iterator[MetaPathFinder | PathEntryFinder]: ... -def iter_modules(path: Iterable[str] | None = ..., prefix: str = ...) -> Iterator[ModuleInfo]: ... +def iter_importers(fullname: str = "") -> Iterator[MetaPathFinder | PathEntryFinder]: ... +def iter_modules(path: Iterable[str] | None = None, prefix: str = "") -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... # undocumented def walk_packages( - path: Iterable[str] | None = ..., prefix: str = ..., onerror: Callable[[str], object] | None = ... + path: Iterable[str] | None = None, prefix: str = "", onerror: Callable[[str], object] | None = None ) -> Iterator[ModuleInfo]: ... def get_data(package: str, resource: str) -> bytes | None: ... diff --git a/mypy/typeshed/stdlib/platform.pyi b/mypy/typeshed/stdlib/platform.pyi index 765a7a5ea5f9..291f302b4c7d 100644 --- a/mypy/typeshed/stdlib/platform.pyi +++ b/mypy/typeshed/stdlib/platform.pyi @@ -7,37 +7,39 @@ if sys.version_info < (3, 8): from typing import NamedTuple if sys.version_info >= (3, 8): - def libc_ver(executable: str | None = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... + def libc_ver(executable: str | None = None, lib: str = "", version: str = "", chunksize: int = 16384) -> tuple[str, str]: ... else: - def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... + def libc_ver( + executable: str = sys.executable, lib: str = "", version: str = "", chunksize: int = 16384 + ) -> tuple[str, str]: ... if sys.version_info < (3, 8): def linux_distribution( - distname: str = ..., - version: str = ..., - id: str = ..., + distname: str = "", + version: str = "", + id: str = "", supported_dists: tuple[str, ...] = ..., full_distribution_name: bool = ..., ) -> tuple[str, str, str]: ... def dist( - distname: str = ..., version: str = ..., id: str = ..., supported_dists: tuple[str, ...] = ... + distname: str = "", version: str = "", id: str = "", supported_dists: tuple[str, ...] = ... ) -> tuple[str, str, str]: ... -def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> tuple[str, str, str, str]: ... +def win32_ver(release: str = "", version: str = "", csd: str = "", ptype: str = "") -> tuple[str, str, str, str]: ... if sys.version_info >= (3, 8): def win32_edition() -> str: ... def win32_is_iot() -> bool: ... def mac_ver( - release: str = ..., versioninfo: tuple[str, str, str] = ..., machine: str = ... + release: str = "", versioninfo: tuple[str, str, str] = ..., machine: str = "" ) -> tuple[str, tuple[str, str, str], str]: ... def java_ver( - release: str = ..., vendor: str = ..., vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... + release: str = "", vendor: str = "", vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... -def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> tuple[str, str]: ... +def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... 
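Several platform functions above now show their real defaults (empty strings, chunksize=16384, executable=sys.executable) instead of `...`. A minimal sketch of the equivalent explicit calls:

```python
import platform
import sys

# architecture() probes the running interpreter unless told otherwise.
assert platform.architecture() == platform.architecture(sys.executable)
# win32_ver() falls back to empty strings off Windows.
print(platform.win32_ver())
if sys.platform == "linux":
    # libc_ver() reads the executable in 16 KiB chunks by default.
    print(platform.libc_ver(chunksize=16384))
```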
class uname_result(NamedTuple): system: str diff --git a/mypy/typeshed/stdlib/plistlib.pyi b/mypy/typeshed/stdlib/plistlib.pyi index 9dcfcdb126cb..5b76c935f76e 100644 --- a/mypy/typeshed/stdlib/plistlib.pyi +++ b/mypy/typeshed/stdlib/plistlib.pyi @@ -1,9 +1,10 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer from collections.abc import Mapping, MutableMapping from datetime import datetime from enum import Enum from typing import IO, Any +from typing_extensions import Self if sys.version_info >= (3, 9): __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] @@ -47,45 +48,47 @@ FMT_XML = PlistFormat.FMT_XML FMT_BINARY = PlistFormat.FMT_BINARY if sys.version_info >= (3, 9): - def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... - def loads(value: bytes, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def load(fp: IO[bytes], *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... + def loads( + value: ReadableBuffer, *, fmt: PlistFormat | None = None, dict_type: type[MutableMapping[str, Any]] = ... + ) -> Any: ... else: def load( fp: IO[bytes], *, - fmt: PlistFormat | None = ..., - use_builtin_types: bool = ..., + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def loads( - value: bytes, + value: ReadableBuffer, *, - fmt: PlistFormat | None = ..., - use_builtin_types: bool = ..., + fmt: PlistFormat | None = None, + use_builtin_types: bool = True, dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def dump( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, fp: IO[bytes], *, fmt: PlistFormat = ..., - sort_keys: bool = ..., - skipkeys: bool = ..., + sort_keys: bool = True, + skipkeys: bool = False, ) -> None: ... def dumps( - value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | bytearray | datetime, *, fmt: PlistFormat = ..., - skipkeys: bool = ..., - sort_keys: bool = ..., + skipkeys: bool = False, + sort_keys: bool = True, ) -> bytes: ... if sys.version_info < (3, 9): def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... def writePlist(value: Mapping[str, Any], pathOrFile: str | IO[bytes]) -> None: ... - def readPlistFromBytes(data: bytes) -> Any: ... + def readPlistFromBytes(data: ReadableBuffer) -> Any: ... def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... if sys.version_info < (3, 9): @@ -98,8 +101,8 @@ if sys.version_info >= (3, 8): data: int def __init__(self, data: int) -> None: ... def __index__(self) -> int: ... - def __reduce__(self: Self) -> tuple[type[Self], tuple[int]]: ... + def __reduce__(self) -> tuple[type[Self], tuple[int]]: ... def __eq__(self, other: object) -> bool: ... class InvalidFileException(ValueError): - def __init__(self, message: str = ...) -> None: ... + def __init__(self, message: str = "Invalid file") -> None: ... 
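plistlib.loads() is now typed to accept any ReadableBuffer, and dump()/dumps() accept bytearray payloads alongside bytes. A small round-trip sketch (the dictionary content is arbitrary):

```python
import plistlib

blob = plistlib.dumps({"name": "example", "payload": bytearray(b"\x01\x02")})
roundtripped = plistlib.loads(memoryview(blob))
assert roundtripped["payload"] == b"\x01\x02"
assert isinstance(plistlib.loads(bytearray(blob)), dict)
```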
diff --git a/mypy/typeshed/stdlib/poplib.pyi b/mypy/typeshed/stdlib/poplib.pyi index fd7afedaad05..c64e47e8ef72 100644 --- a/mypy/typeshed/stdlib/poplib.pyi +++ b/mypy/typeshed/stdlib/poplib.pyi @@ -25,13 +25,13 @@ class POP3: sock: socket.socket file: BinaryIO welcome: bytes - def __init__(self, host: str, port: int = ..., timeout: float = ...) -> None: ... + def __init__(self, host: str, port: int = 110, timeout: float = ...) -> None: ... def getwelcome(self) -> bytes: ... def set_debuglevel(self, level: int) -> None: ... def user(self, user: str) -> bytes: ... def pass_(self, pswd: str) -> bytes: ... def stat(self) -> tuple[int, int]: ... - def list(self, which: Any | None = ...) -> _LongResp: ... + def list(self, which: Any | None = None) -> _LongResp: ... def retr(self, which: Any) -> _LongResp: ... def dele(self, which: Any) -> bytes: ... def noop(self) -> bytes: ... @@ -48,17 +48,17 @@ class POP3: def uidl(self, which: Any) -> bytes: ... def utf8(self) -> bytes: ... def capa(self) -> dict[str, _list[str]]: ... - def stls(self, context: ssl.SSLContext | None = ...) -> bytes: ... + def stls(self, context: ssl.SSLContext | None = None) -> bytes: ... class POP3_SSL(POP3): def __init__( self, host: str, - port: int = ..., - keyfile: str | None = ..., - certfile: str | None = ..., + port: int = 995, + keyfile: str | None = None, + certfile: str | None = None, timeout: float = ..., - context: ssl.SSLContext | None = ..., + context: ssl.SSLContext | None = None, ) -> None: ... # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> NoReturn: ... + def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ... diff --git a/mypy/typeshed/stdlib/posix.pyi b/mypy/typeshed/stdlib/posix.pyi index 7055f15f3d67..ffd96757586b 100644 --- a/mypy/typeshed/stdlib/posix.pyi +++ b/mypy/typeshed/stdlib/posix.pyi @@ -309,17 +309,10 @@ if sys.platform != "win32": copy_file_range as copy_file_range, memfd_create as memfd_create, ) - from os import register_at_fork as register_at_fork + from os import preadv as preadv, pwritev as pwritev, register_at_fork as register_at_fork if sys.platform != "darwin": - from os import ( - RWF_DSYNC as RWF_DSYNC, - RWF_HIPRI as RWF_HIPRI, - RWF_NOWAIT as RWF_NOWAIT, - RWF_SYNC as RWF_SYNC, - preadv as preadv, - pwritev as pwritev, - ) + from os import RWF_DSYNC as RWF_DSYNC, RWF_HIPRI as RWF_HIPRI, RWF_NOWAIT as RWF_NOWAIT, RWF_SYNC as RWF_SYNC # Not same as os.environ or os.environb # Because of this variable, we can't do "from posix import *" in os/__init__.pyi diff --git a/mypy/typeshed/stdlib/posixpath.pyi b/mypy/typeshed/stdlib/posixpath.pyi index 8d880a072dfb..1945190be5f8 100644 --- a/mypy/typeshed/stdlib/posixpath.pyi +++ b/mypy/typeshed/stdlib/posixpath.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import AnyOrLiteralStr, BytesPath, StrOrBytesPath, StrPath +from _typeshed import AnyOrLiteralStr, BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath from collections.abc import Sequence from genericpath import ( commonprefix as commonprefix, @@ -118,9 +118,9 @@ def join(__a: BytesPath, *paths: BytesPath) -> bytes: ... if sys.version_info >= (3, 10): @overload - def realpath(filename: PathLike[AnyStr], *, strict: bool = ...) -> AnyStr: ... + def realpath(filename: PathLike[AnyStr], *, strict: bool = False) -> AnyStr: ... 
@overload - def realpath(filename: AnyStr, *, strict: bool = ...) -> AnyStr: ... + def realpath(filename: AnyStr, *, strict: bool = False) -> AnyStr: ... else: @overload @@ -129,11 +129,11 @@ else: def realpath(filename: AnyStr) -> AnyStr: ... @overload -def relpath(path: LiteralString, start: LiteralString | None = ...) -> LiteralString: ... +def relpath(path: LiteralString, start: LiteralString | None = None) -> LiteralString: ... @overload -def relpath(path: BytesPath, start: BytesPath | None = ...) -> bytes: ... +def relpath(path: BytesPath, start: BytesPath | None = None) -> bytes: ... @overload -def relpath(path: StrPath, start: StrPath | None = ...) -> str: ... +def relpath(path: StrPath, start: StrPath | None = None) -> str: ... @overload def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload @@ -147,6 +147,6 @@ def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload def splitext(p: AnyOrLiteralStr) -> tuple[AnyOrLiteralStr, AnyOrLiteralStr]: ... def isabs(s: StrOrBytesPath) -> bool: ... -def islink(path: StrOrBytesPath | int) -> bool: ... -def ismount(path: StrOrBytesPath | int) -> bool: ... -def lexists(path: StrOrBytesPath | int) -> bool: ... +def islink(path: FileDescriptorOrPath) -> bool: ... +def ismount(path: FileDescriptorOrPath) -> bool: ... +def lexists(path: FileDescriptorOrPath) -> bool: ... diff --git a/mypy/typeshed/stdlib/pprint.pyi b/mypy/typeshed/stdlib/pprint.pyi index 0addc8f538b2..5a909c69b077 100644 --- a/mypy/typeshed/stdlib/pprint.pyi +++ b/mypy/typeshed/stdlib/pprint.pyi @@ -9,28 +9,28 @@ else: if sys.version_info >= (3, 10): def pformat( object: object, - indent: int = ..., - width: int = ..., - depth: int | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> str: ... elif sys.version_info >= (3, 8): def pformat( object: object, - indent: int = ..., - width: int = ..., - depth: int | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> str: ... else: - def pformat(object: object, indent: int = ..., width: int = ..., depth: int | None = ..., *, compact: bool = ...) -> str: ... + def pformat(object: object, indent: int = 1, width: int = 80, depth: int | None = None, *, compact: bool = False) -> str: ... if sys.version_info >= (3, 10): def pp( @@ -41,7 +41,7 @@ if sys.version_info >= (3, 10): depth: int | None = ..., *, compact: bool = ..., - sort_dicts: bool = ..., + sort_dicts: bool = False, underscore_numbers: bool = ..., ) -> None: ... @@ -54,43 +54,43 @@ elif sys.version_info >= (3, 8): depth: int | None = ..., *, compact: bool = ..., - sort_dicts: bool = ..., + sort_dicts: bool = False, ) -> None: ... if sys.version_info >= (3, 10): def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> None: ... 
elif sys.version_info >= (3, 8): def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> None: ... else: def pprint( object: object, - stream: IO[str] | None = ..., - indent: int = ..., - width: int = ..., - depth: int | None = ..., + stream: IO[str] | None = None, + indent: int = 1, + width: int = 80, + depth: int | None = None, *, - compact: bool = ..., + compact: bool = False, ) -> None: ... def isreadable(object: object) -> bool: ... @@ -101,35 +101,35 @@ class PrettyPrinter: if sys.version_info >= (3, 10): def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., - underscore_numbers: bool = ..., + compact: bool = False, + sort_dicts: bool = True, + underscore_numbers: bool = False, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., - sort_dicts: bool = ..., + compact: bool = False, + sort_dicts: bool = True, ) -> None: ... else: def __init__( self, - indent: int = ..., - width: int = ..., - depth: int | None = ..., - stream: IO[str] | None = ..., + indent: int = 1, + width: int = 80, + depth: int | None = None, + stream: IO[str] | None = None, *, - compact: bool = ..., + compact: bool = False, ) -> None: ... def pformat(self, object: object) -> str: ... diff --git a/mypy/typeshed/stdlib/profile.pyi b/mypy/typeshed/stdlib/profile.pyi index 4b3f832d3224..6ae375004158 100644 --- a/mypy/typeshed/stdlib/profile.pyi +++ b/mypy/typeshed/stdlib/profile.pyi @@ -1,13 +1,13 @@ -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Callable from typing import Any, TypeVar -from typing_extensions import ParamSpec, TypeAlias +from typing_extensions import ParamSpec, Self, TypeAlias __all__ = ["run", "runctx", "Profile"] -def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... +def run(statement: str, filename: str | None = None, sort: str | int = -1) -> None: ... def runctx( - statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = ..., sort: str | int = ... + statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = None, sort: str | int = -1 ) -> None: ... _T = TypeVar("_T") @@ -17,15 +17,15 @@ _Label: TypeAlias = tuple[str, int, str] class Profile: bias: int stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented - def __init__(self, timer: Callable[[], float] | None = ..., bias: int | None = ...) -> None: ... + def __init__(self, timer: Callable[[], float] | None = None, bias: int | None = None) -> None: ... def set_cmd(self, cmd: str) -> None: ... def simulate_call(self, name: str) -> None: ... def simulate_cmd_complete(self) -> None: ... - def print_stats(self, sort: str | int = ...) -> None: ... + def print_stats(self, sort: str | int = -1) -> None: ... 
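The pprint hunks above record that pformat()/pprint() keep sort_dicts=True while pp() defaults to sort_dicts=False, along with the literal indent/width defaults. A quick sketch of the difference (pp() requires Python 3.8+):

```python
import pprint

sample = {"b": 2, "a": 1}
assert pprint.pformat(sample) == "{'a': 1, 'b': 2}"   # sorted by default
pprint.pp(sample)                                     # prints {'b': 2, 'a': 1}
pprint.pprint(sample, width=80, indent=1)             # explicit defaults, sorted output
```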
def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... - def run(self: Self, cmd: str) -> Self: ... - def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def run(self, cmd: str) -> Self: ... + def runctx(self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... - def calibrate(self, m: int, verbose: int = ...) -> float: ... + def calibrate(self, m: int, verbose: int = 0) -> float: ... diff --git a/mypy/typeshed/stdlib/pstats.pyi b/mypy/typeshed/stdlib/pstats.pyi index 7629cd63438f..5d25d1bb3641 100644 --- a/mypy/typeshed/stdlib/pstats.pyi +++ b/mypy/typeshed/stdlib/pstats.pyi @@ -1,11 +1,11 @@ import sys -from _typeshed import Self, StrOrBytesPath +from _typeshed import StrOrBytesPath from collections.abc import Iterable from cProfile import Profile as _cProfile from enum import Enum from profile import Profile from typing import IO, Any, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] @@ -30,7 +30,7 @@ if sys.version_info >= (3, 9): @dataclass(unsafe_hash=True) class FunctionProfile: - ncalls: int + ncalls: str tottime: float percall_tottime: float cumtime: float @@ -47,33 +47,33 @@ _SortArgDict: TypeAlias = dict[str, tuple[tuple[tuple[int, int], ...], str]] class Stats: sort_arg_dict_default: _SortArgDict def __init__( - self: Self, + self, __arg: None | str | Profile | _cProfile = ..., *args: None | str | Profile | _cProfile | Self, - stream: IO[Any] | None = ..., + stream: IO[Any] | None = None, ) -> None: ... def init(self, arg: None | str | Profile | _cProfile) -> None: ... def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... def get_top_level_stats(self) -> None: ... - def add(self: Self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... + def add(self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... def dump_stats(self, filename: StrOrBytesPath) -> None: ... def get_sort_arg_defs(self) -> _SortArgDict: ... @overload - def sort_stats(self: Self, field: Literal[-1, 0, 1, 2]) -> Self: ... + def sort_stats(self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload - def sort_stats(self: Self, *field: str) -> Self: ... - def reverse_order(self: Self) -> Self: ... - def strip_dirs(self: Self) -> Self: ... + def sort_stats(self, *field: str) -> Self: ... + def reverse_order(self) -> Self: ... + def strip_dirs(self) -> Self: ... def calc_callees(self) -> None: ... def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... if sys.version_info >= (3, 9): def get_stats_profile(self) -> StatsProfile: ... def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... - def print_stats(self: Self, *amount: _Selector) -> Self: ... - def print_callees(self: Self, *amount: _Selector) -> Self: ... - def print_callers(self: Self, *amount: _Selector) -> Self: ... + def print_stats(self, *amount: _Selector) -> Self: ... + def print_callees(self, *amount: _Selector) -> Self: ... + def print_callers(self, *amount: _Selector) -> Self: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... 
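With Profile.run()/runctx() and the Stats methods above annotated as returning Self, the usual fluent chaining now type-checks against the concrete classes. A sketch using cProfile as the data source (the profiled expression is arbitrary):

```python
import cProfile
import io
import pstats

profiler = cProfile.Profile()
profiler.enable()
sum(i * i for i in range(10_000))
profiler.disable()

stream = io.StringIO()
stats = pstats.Stats(profiler, stream=stream)
# Each call returns the same Stats instance, so the chain stays well typed.
stats.strip_dirs().sort_stats("cumulative").print_stats(5)
assert "function calls" in stream.getvalue()
```

The same Self-typed pattern applies to chains built on profile.Profile.run().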
- def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = ...) -> None: ... + def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = "->") -> None: ... def print_title(self) -> None: ... def print_line(self, func: str) -> None: ... diff --git a/mypy/typeshed/stdlib/py_compile.pyi b/mypy/typeshed/stdlib/py_compile.pyi index 1e9b6c2cb209..48f1d7dc3e70 100644 --- a/mypy/typeshed/stdlib/py_compile.pyi +++ b/mypy/typeshed/stdlib/py_compile.pyi @@ -9,7 +9,7 @@ class PyCompileError(Exception): exc_value: BaseException file: str msg: str - def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = ...) -> None: ... + def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = "") -> None: ... class PycInvalidationMode(enum.Enum): TIMESTAMP: int @@ -21,26 +21,26 @@ def _get_default_invalidation_mode() -> PycInvalidationMode: ... if sys.version_info >= (3, 8): def compile( file: AnyStr, - cfile: AnyStr | None = ..., - dfile: AnyStr | None = ..., - doraise: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., - quiet: int = ..., + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, + quiet: int = 0, ) -> AnyStr | None: ... else: def compile( file: AnyStr, - cfile: AnyStr | None = ..., - dfile: AnyStr | None = ..., - doraise: bool = ..., - optimize: int = ..., - invalidation_mode: PycInvalidationMode | None = ..., + cfile: AnyStr | None = None, + dfile: AnyStr | None = None, + doraise: bool = False, + optimize: int = -1, + invalidation_mode: PycInvalidationMode | None = None, ) -> AnyStr | None: ... if sys.version_info >= (3, 10): def main() -> None: ... else: - def main(args: list[str] | None = ...) -> int: ... + def main(args: list[str] | None = None) -> int: ... diff --git a/mypy/typeshed/stdlib/pyclbr.pyi b/mypy/typeshed/stdlib/pyclbr.pyi index ab19b44d7d79..38658a03139c 100644 --- a/mypy/typeshed/stdlib/pyclbr.pyi +++ b/mypy/typeshed/stdlib/pyclbr.pyi @@ -25,13 +25,13 @@ class Class: super_: list[Class | str] | None, file: str, lineno: int, - parent: Class | None = ..., + parent: Class | None = None, *, - end_lineno: int | None = ..., + end_lineno: int | None = None, ) -> None: ... else: def __init__( - self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = ... + self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = None ) -> None: ... class Function: @@ -54,13 +54,13 @@ class Function: name: str, file: str, lineno: int, - parent: Function | Class | None = ..., - is_async: bool = ..., + parent: Function | Class | None = None, + is_async: bool = False, *, - end_lineno: int | None = ..., + end_lineno: int | None = None, ) -> None: ... else: - def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = ...) -> None: ... + def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = None) -> None: ... -def readmodule(module: str, path: Sequence[str] | None = ...) -> dict[str, Class]: ... -def readmodule_ex(module: str, path: Sequence[str] | None = ...) -> dict[str, Class | Function | list[str]]: ... +def readmodule(module: str, path: Sequence[str] | None = None) -> dict[str, Class]: ... 
+def readmodule_ex(module: str, path: Sequence[str] | None = None) -> dict[str, Class | Function | list[str]]: ... diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index abcffc31111a..c6893d50c66a 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -6,15 +6,16 @@ from collections.abc import Callable, Container, Mapping, MutableMapping from reprlib import Repr from types import MethodType, ModuleType, TracebackType from typing import IO, Any, AnyStr, NoReturn, TypeVar +from typing_extensions import Final, TypeGuard __all__ = ["help"] _T = TypeVar("_T") -__author__: str -__date__: str -__version__: str -__credits__: str +__author__: Final[str] +__date__: Final[str] +__version__: Final[str] +__credits__: Final[str] def pathdirs() -> list[str]: ... def getdoc(object: object) -> str: ... @@ -25,7 +26,7 @@ def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... def cram(text: str, maxlen: int) -> str: ... def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... -def visiblename(name: str, all: Container[str] | None = ..., obj: object | None = ...) -> bool: ... +def visiblename(name: str, all: Container[str] | None = None, obj: object = None) -> bool: ... def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... @@ -43,29 +44,23 @@ def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, Modu class Doc: PYTHONDOCS: str - def document(self, object: object, name: str | None = ..., *args: Any) -> str: ... - def fail(self, object: object, name: str | None = ..., *args: Any) -> NoReturn: ... + def document(self, object: object, name: str | None = None, *args: Any) -> str: ... + def fail(self, object: object, name: str | None = None, *args: Any) -> NoReturn: ... @abstractmethod - def docmodule(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docclass(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docclass(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docroutine(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docroutine(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docother(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docother(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docproperty(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docproperty(self, object: object, name: str | None = None, *args: Any) -> str: ... @abstractmethod - def docdata(self, object: object, name: str | None = ..., *args: Any) -> str: ... + def docdata(self, object: object, name: str | None = None, *args: Any) -> str: ... def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... class HTMLRepr(Repr): - maxlist: int - maxtuple: int - maxdict: int - maxstring: int - maxother: int - def __init__(self) -> None: ... def escape(self, text: str) -> str: ... def repr(self, object: object) -> str: ... def repr1(self, x: object, level: complex) -> str: ... @@ -80,32 +75,32 @@ class HTMLDoc(Doc): escape = _repr_instance.escape def page(self, title: str, contents: str) -> str: ... 
if sys.version_info >= (3, 11): - def heading(self, title: str, extras: str = ...) -> str: ... + def heading(self, title: str, extras: str = "") -> str: ... def section( self, title: str, cls: str, contents: str, - width: int = ..., - prelude: str = ..., - marginalia: str | None = ..., - gap: str = ..., + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", ) -> str: ... def multicolumn(self, list: list[_T], format: Callable[[_T], str]) -> str: ... else: - def heading(self, title: str, fgcol: str, bgcol: str, extras: str = ...) -> str: ... + def heading(self, title: str, fgcol: str, bgcol: str, extras: str = "") -> str: ... def section( self, title: str, fgcol: str, bgcol: str, contents: str, - width: int = ..., - prelude: str = ..., - marginalia: str | None = ..., - gap: str = ..., + width: int = 6, + prelude: str = "", + marginalia: str | None = None, + gap: str = " ", ) -> str: ... - def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = ...) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = 4) -> str: ... def bigsection(self, title: str, *args: Any) -> str: ... def preformat(self, text: str) -> str: ... @@ -117,20 +112,20 @@ class HTMLDoc(Doc): def markup( self, text: str, - escape: Callable[[str], str] | None = ..., + escape: Callable[[str], str] | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., ) -> str: ... def formattree( - self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None ) -> str: ... - def docmodule(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... def docclass( self, object: object, - name: str | None = ..., - mod: str | None = ..., + name: str | None = None, + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., *ignored: Any, @@ -139,26 +134,20 @@ class HTMLDoc(Doc): def docroutine( # type: ignore[override] self, object: object, - name: str | None = ..., - mod: str | None = ..., + name: str | None = None, + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., - cl: type | None = ..., + cl: type | None = None, ) -> str: ... - def docproperty(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docother(self, object: object, name: str | None = ..., mod: Any | None = ..., *ignored: Any) -> str: ... - def docdata(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = ...) -> str: ... + def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docother(self, object: object, name: str | None = None, mod: Any | None = None, *ignored: Any) -> str: ... + def docdata(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = None) -> str: ... 
def filelink(self, url: str, path: str) -> str: ... class TextRepr(Repr): - maxlist: int - maxtuple: int - maxdict: int - maxstring: int - maxother: int - def __init__(self) -> None: ... def repr1(self, x: object, level: complex) -> str: ... def repr_string(self, x: str, level: complex) -> str: ... def repr_str(self, x: str, level: complex) -> str: ... @@ -168,25 +157,25 @@ class TextDoc(Doc): _repr_instance: TextRepr = ... repr = _repr_instance.repr def bold(self, text: str) -> str: ... - def indent(self, text: str, prefix: str = ...) -> str: ... + def indent(self, text: str, prefix: str = " ") -> str: ... def section(self, title: str, contents: str) -> str: ... def formattree( - self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ..., prefix: str = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None, prefix: str = "" ) -> str: ... - def docmodule(self, object: object, name: str | None = ..., mod: Any | None = ...) -> str: ... # type: ignore[override] - def docclass(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... + def docmodule(self, object: object, name: str | None = None, mod: Any | None = None) -> str: ... # type: ignore[override] + def docclass(self, object: object, name: str | None = None, mod: str | None = None, *ignored: Any) -> str: ... def formatvalue(self, object: object) -> str: ... - def docroutine(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docproperty(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] - def docdata(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] + def docroutine(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = None, mod: Any | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] def docother( # type: ignore[override] self, object: object, - name: str | None = ..., - mod: str | None = ..., - parent: str | None = ..., - maxlen: int | None = ..., - doc: Any | None = ..., + name: str | None = None, + mod: str | None = None, + parent: str | None = None, + maxlen: int | None = None, + doc: Any | None = None, ) -> str: ... def pager(text: str) -> None: ... @@ -203,16 +192,23 @@ text: TextDoc html: HTMLDoc def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... -def render_doc(thing: str | object, title: str = ..., forceload: bool = ..., renderer: Doc | None = ...) -> str: ... -def doc(thing: str | object, title: str = ..., forceload: bool = ..., output: SupportsWrite[str] | None = ...) -> None: ... +def render_doc( + thing: str | object, title: str = "Python Library Documentation: %s", forceload: bool = ..., renderer: Doc | None = None +) -> str: ... +def doc( + thing: str | object, + title: str = "Python Library Documentation: %s", + forceload: bool = ..., + output: SupportsWrite[str] | None = None, +) -> None: ... def writedoc(thing: str | object, forceload: bool = ...) -> None: ... 
-def writedocs(dir: str, pkgpath: str = ..., done: Any | None = ...) -> None: ... +def writedocs(dir: str, pkgpath: str = "", done: Any | None = None) -> None: ... class Helper: keywords: dict[str, str | tuple[str, str]] symbols: dict[str, str] topics: dict[str, str | tuple[str, ...]] - def __init__(self, input: IO[str] | None = ..., output: IO[str] | None = ...) -> None: ... + def __init__(self, input: IO[str] | None = None, output: IO[str] | None = None) -> None: ... @property def input(self) -> IO[str]: ... @property @@ -222,13 +218,13 @@ class Helper: def getline(self, prompt: str) -> str: ... def help(self, request: Any) -> None: ... def intro(self) -> None: ... - def list(self, items: _list[str], columns: int = ..., width: int = ...) -> None: ... + def list(self, items: _list[str], columns: int = 4, width: int = 80) -> None: ... def listkeywords(self) -> None: ... def listsymbols(self) -> None: ... def listtopics(self) -> None: ... - def showtopic(self, topic: str, more_xrefs: str = ...) -> None: ... + def showtopic(self, topic: str, more_xrefs: str = "") -> None: ... def showsymbol(self, symbol: str) -> None: ... - def listmodules(self, key: str = ...) -> None: ... + def listmodules(self, key: str = "") -> None: ... help: Helper @@ -237,11 +233,11 @@ class ModuleScanner: def run( self, callback: Callable[[str | None, str, str], object], - key: str | None = ..., - completer: Callable[[], object] | None = ..., - onerror: Callable[[str], object] | None = ..., + key: str | None = None, + completer: Callable[[], object] | None = None, + onerror: Callable[[str], object] | None = None, ) -> None: ... def apropos(key: str) -> None: ... -def ispath(x: Any) -> bool: ... +def ispath(x: object) -> TypeGuard[str]: ... def cli() -> None: ... diff --git a/mypy/typeshed/stdlib/pyexpat/__init__.pyi b/mypy/typeshed/stdlib/pyexpat/__init__.pyi index 6a4ed891fe10..9e1eea08be54 100644 --- a/mypy/typeshed/stdlib/pyexpat/__init__.pyi +++ b/mypy/typeshed/stdlib/pyexpat/__init__.pyi @@ -1,7 +1,6 @@ -import pyexpat.errors as errors -import pyexpat.model as model -from _typeshed import SupportsRead +from _typeshed import ReadableBuffer, SupportsRead from collections.abc import Callable +from pyexpat import errors as errors, model as model from typing import Any from typing_extensions import TypeAlias, final @@ -25,14 +24,14 @@ _Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]] @final class XMLParserType: - def Parse(self, __data: str | bytes, __isfinal: bool = ...) -> int: ... + def Parse(self, __data: str | ReadableBuffer, __isfinal: bool = False) -> int: ... def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... def SetBase(self, __base: str) -> None: ... def GetBase(self) -> str | None: ... def GetInputContext(self) -> bytes | None: ... def ExternalEntityParserCreate(self, __context: str | None, __encoding: str = ...) -> XMLParserType: ... def SetParamEntityParsing(self, __flag: int) -> int: ... - def UseForeignDTD(self, __flag: bool = ...) -> None: ... + def UseForeignDTD(self, __flag: bool = True) -> None: ... @property def intern(self) -> dict[str, str]: ... buffer_size: int @@ -77,5 +76,5 @@ def ErrorString(__code: int) -> str: ... # intern is undocumented def ParserCreate( - encoding: str | None = ..., namespace_separator: str | None = ..., intern: dict[str, Any] | None = ... + encoding: str | None = None, namespace_separator: str | None = None, intern: dict[str, Any] | None = None ) -> XMLParserType: ... 
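The pydoc stub above narrows `ispath` from `(x: Any) -> bool` to `(x: object) -> TypeGuard[str]`. A minimal sketch of what that narrowing buys a type checker; `is_path` and `describe` below are hypothetical stand-ins, not pydoc's actual implementation:

```python
from typing_extensions import TypeGuard


def is_path(x: object) -> TypeGuard[str]:
    # Simplified stand-in for pydoc.ispath: a True result tells the type
    # checker that x can be narrowed from object to str.
    return isinstance(x, str)


def describe(value: object) -> str:
    if is_path(value):
        # mypy narrows value to str here, so str-only methods type-check.
        return value.upper()
    return repr(value)


print(describe("docs/index.rst"))  # DOCS/INDEX.RST
print(describe(42))                # 42
```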
diff --git a/mypy/typeshed/stdlib/queue.pyi b/mypy/typeshed/stdlib/queue.pyi index 7ea4beb664c5..3537e445ed97 100644 --- a/mypy/typeshed/stdlib/queue.pyi +++ b/mypy/typeshed/stdlib/queue.pyi @@ -23,14 +23,14 @@ class Queue(Generic[_T]): # Despite the fact that `queue` has `deque` type, # we treat it as `Any` to allow different implementations in subtypes. queue: Any # undocumented - def __init__(self, maxsize: int = ...) -> None: ... + def __init__(self, maxsize: int = 0) -> None: ... def _init(self, maxsize: int) -> None: ... def empty(self) -> bool: ... def full(self) -> bool: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... def get_nowait(self) -> _T: ... def _get(self) -> _T: ... - def put(self, item: _T, block: bool = ..., timeout: float | None = ...) -> None: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def _put(self, item: _T) -> None: ... def join(self) -> None: ... @@ -49,9 +49,9 @@ class LifoQueue(Queue[_T]): class SimpleQueue(Generic[_T]): def __init__(self) -> None: ... def empty(self) -> bool: ... - def get(self, block: bool = ..., timeout: float | None = ...) -> _T: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... def get_nowait(self) -> _T: ... - def put(self, item: _T, block: bool = ..., timeout: float | None = ...) -> None: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... def put_nowait(self, item: _T) -> None: ... def qsize(self) -> int: ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/quopri.pyi b/mypy/typeshed/stdlib/quopri.pyi index b8dc0787fd1a..b652e139bd0e 100644 --- a/mypy/typeshed/stdlib/quopri.pyi +++ b/mypy/typeshed/stdlib/quopri.pyi @@ -1,8 +1,11 @@ -from typing import BinaryIO +from _typeshed import ReadableBuffer, SupportsNoArgReadline, SupportsRead, SupportsWrite +from typing import Protocol __all__ = ["encode", "decode", "encodestring", "decodestring"] -def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... -def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... -def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... -def decodestring(s: bytes, header: int = ...) -> bytes: ... +class _Input(SupportsRead[bytes], SupportsNoArgReadline[bytes], Protocol): ... + +def encode(input: _Input, output: SupportsWrite[bytes], quotetabs: int, header: bool = False) -> None: ... +def encodestring(s: ReadableBuffer, quotetabs: bool = False, header: bool = False) -> bytes: ... +def decode(input: _Input, output: SupportsWrite[bytes], header: bool = False) -> None: ... +def decodestring(s: str | ReadableBuffer, header: bool = False) -> bytes: ... diff --git a/mypy/typeshed/stdlib/random.pyi b/mypy/typeshed/stdlib/random.pyi index 3bb999bfaaa6..4849878691f5 100644 --- a/mypy/typeshed/stdlib/random.pyi +++ b/mypy/typeshed/stdlib/random.pyi @@ -39,19 +39,18 @@ _T = TypeVar("_T") class Random(_random.Random): VERSION: ClassVar[int] - def __init__(self, x: Any = ...) -> None: ... + def __init__(self, x: Any = None) -> None: ... # Using other `seed` types is deprecated since 3.9 and removed in 3.11 # Ignore Y041, since random.seed doesn't treat int like a float subtype. Having an explicit # int better documents conventional usage of random.seed. 
if sys.version_info >= (3, 9): - def seed(self, a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... # type: ignore[override] # noqa: Y041 + def seed(self, a: int | float | str | bytes | bytearray | None = None, version: int = 2) -> None: ... # type: ignore[override] # noqa: Y041 else: - def seed(self, a: Any = ..., version: int = ...) -> None: ... + def seed(self, a: Any = None, version: int = 2) -> None: ... def getstate(self) -> tuple[Any, ...]: ... def setstate(self, state: tuple[Any, ...]) -> None: ... - def getrandbits(self, __k: int) -> int: ... - def randrange(self, start: int, stop: int | None = ..., step: int = ...) -> int: ... + def randrange(self, start: int, stop: int | None = None, step: int = 1) -> int: ... def randint(self, a: int, b: int) -> int: ... if sys.version_info >= (3, 9): def randbytes(self, n: int) -> bytes: ... @@ -60,33 +59,32 @@ class Random(_random.Random): def choices( self, population: SupportsLenAndGetItem[_T], - weights: Sequence[float | Fraction] | None = ..., + weights: Sequence[float | Fraction] | None = None, *, - cum_weights: Sequence[float | Fraction] | None = ..., - k: int = ..., + cum_weights: Sequence[float | Fraction] | None = None, + k: int = 1, ) -> list[_T]: ... if sys.version_info >= (3, 11): def shuffle(self, x: MutableSequence[Any]) -> None: ... else: - def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = ...) -> None: ... + def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = None) -> None: ... if sys.version_info >= (3, 11): - def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = ...) -> list[_T]: ... + def sample(self, population: Sequence[_T], k: int, *, counts: Iterable[int] | None = None) -> list[_T]: ... elif sys.version_info >= (3, 9): def sample( - self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = ... + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[int] | None = None ) -> list[_T]: ... else: def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... - def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... - def triangular(self, low: float = ..., high: float = ..., mode: float | None = ...) -> float: ... + def triangular(self, low: float = 0.0, high: float = 1.0, mode: float | None = None) -> float: ... def betavariate(self, alpha: float, beta: float) -> float: ... def expovariate(self, lambd: float) -> float: ... def gammavariate(self, alpha: float, beta: float) -> float: ... if sys.version_info >= (3, 11): - def gauss(self, mu: float = ..., sigma: float = ...) -> float: ... - def normalvariate(self, mu: float = ..., sigma: float = ...) -> float: ... + def gauss(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... + def normalvariate(self, mu: float = 0.0, sigma: float = 1.0) -> float: ... else: def gauss(self, mu: float, sigma: float) -> float: ... def normalvariate(self, mu: float, sigma: float) -> float: ... diff --git a/mypy/typeshed/stdlib/re.pyi b/mypy/typeshed/stdlib/re.pyi index 3e52d209eb87..4e53141ade84 100644 --- a/mypy/typeshed/stdlib/re.pyi +++ b/mypy/typeshed/stdlib/re.pyi @@ -67,10 +67,12 @@ class Match(Generic[AnyStr]): @overload def expand(self: Match[str], template: str) -> str: ... @overload - def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... + def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... 
# type: ignore[misc] + @overload + def expand(self, template: AnyStr) -> AnyStr: ... # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload - def group(self, __group: Literal[0] = ...) -> AnyStr: ... + def group(self, __group: Literal[0] = 0) -> AnyStr: ... @overload def group(self, __group: str | int) -> AnyStr | Any: ... @overload @@ -87,9 +89,9 @@ class Match(Generic[AnyStr]): def groupdict(self) -> dict[str, AnyStr | Any]: ... @overload def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... - def start(self, __group: int | str = ...) -> int: ... - def end(self, __group: int | str = ...) -> int: ... - def span(self, __group: int | str = ...) -> tuple[int, int]: ... + def start(self, __group: int | str = 0) -> int: ... + def end(self, __group: int | str = 0) -> int: ... + def span(self, __group: int | str = 0) -> tuple[int, int]: ... @property def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @@ -113,48 +115,64 @@ class Pattern(Generic[AnyStr]): @property def pattern(self) -> AnyStr: ... @overload - def search(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] + @overload + def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... + @overload + def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... + @overload + def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] @overload - def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def match(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ... @overload - def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ... # type: ignore[misc] @overload - def fullmatch(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ... @overload - def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + def split(self: Pattern[str], string: str, maxsplit: int = 0) -> list[str | Any]: ... @overload - def split(self: Pattern[str], string: str, maxsplit: int = ...) -> list[str | Any]: ... + def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0) -> list[bytes | Any]: ... @overload - def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = ...) -> list[bytes | Any]: ... + def split(self, string: AnyStr, maxsplit: int = 0) -> list[AnyStr | Any]: ... 
# return type depends on the number of groups in the pattern @overload - def findall(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> list[Any]: ... + def findall(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload - def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> list[Any]: ... + def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> list[Any]: ... @overload - def finditer(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Iterator[Match[str]]: ... + def findall(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> list[AnyStr]: ... @overload - def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Iterator[Match[bytes]]: ... + def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ... @overload - def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> str: ... + def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ... # type: ignore[misc] @overload - def sub( + def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ... + @overload + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ... + @overload + def sub( # type: ignore[misc] self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., + count: int = 0, ) -> bytes: ... @overload - def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> tuple[str, int]: ... + def sub(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> AnyStr: ... @overload - def subn( + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ... + @overload + def subn( # type: ignore[misc] self: Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., + count: int = 0, ) -> tuple[bytes, int]: ... + @overload + def subn(self, repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = 0) -> tuple[AnyStr, int]: ... def __copy__(self) -> Pattern[AnyStr]: ... def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... if sys.version_info >= (3, 9): @@ -212,59 +230,59 @@ _FlagsType: TypeAlias = int | RegexFlag # pattern arguments do *not* accept arbitrary buffers such as bytearray, # because the pattern must be hashable. @overload -def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def compile(pattern: AnyStr, flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload -def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def compile(pattern: Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... @overload -def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... 
+def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Match[str] | None: ... @overload -def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Match[bytes] | None: ... @overload -def split(pattern: str | Pattern[str], string: str, maxsplit: int = ..., flags: _FlagsType = ...) -> list[str | Any]: ... +def split(pattern: str | Pattern[str], string: str, maxsplit: int = 0, flags: _FlagsType = 0) -> list[str | Any]: ... @overload def split( - pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = ..., flags: _FlagsType = ... + pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = 0, flags: _FlagsType = 0 ) -> list[bytes | Any]: ... @overload -def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> list[Any]: ... +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> list[Any]: ... @overload -def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> list[Any]: ... +def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> list[Any]: ... @overload -def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Iterator[Match[str]]: ... +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = 0) -> Iterator[Match[str]]: ... @overload -def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Iterator[Match[bytes]]: ... +def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = 0) -> Iterator[Match[bytes]]: ... @overload def sub( - pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> str: ... @overload def sub( pattern: bytes | Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., - flags: _FlagsType = ..., + count: int = 0, + flags: _FlagsType = 0, ) -> bytes: ... @overload def subn( - pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0, flags: _FlagsType = 0 ) -> tuple[str, int]: ... 
@overload def subn( pattern: bytes | Pattern[bytes], repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], string: ReadableBuffer, - count: int = ..., - flags: _FlagsType = ..., + count: int = 0, + flags: _FlagsType = 0, ) -> tuple[bytes, int]: ... def escape(pattern: AnyStr) -> AnyStr: ... def purge() -> None: ... -def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = 0) -> Pattern[AnyStr]: ... diff --git a/mypy/typeshed/stdlib/readline.pyi b/mypy/typeshed/stdlib/readline.pyi index ceca2e32f221..14c01a986351 100644 --- a/mypy/typeshed/stdlib/readline.pyi +++ b/mypy/typeshed/stdlib/readline.pyi @@ -8,13 +8,13 @@ if sys.platform != "win32": _CompDisp: TypeAlias = Callable[[str, Sequence[str], int], None] def parse_and_bind(__string: str) -> None: ... - def read_init_file(__filename: StrOrBytesPath | None = ...) -> None: ... + def read_init_file(__filename: StrOrBytesPath | None = None) -> None: ... def get_line_buffer() -> str: ... def insert_text(__string: str) -> None: ... def redisplay() -> None: ... - def read_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... - def write_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... - def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = ...) -> None: ... + def read_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def write_history_file(__filename: StrOrBytesPath | None = None) -> None: ... + def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = None) -> None: ... def get_history_length() -> int: ... def set_history_length(__length: int) -> None: ... def clear_history() -> None: ... @@ -24,13 +24,13 @@ if sys.platform != "win32": def replace_history_item(__pos: int, __line: str) -> None: ... def add_history(__string: str) -> None: ... def set_auto_history(__enabled: bool) -> None: ... - def set_startup_hook(__function: Callable[[], object] | None = ...) -> None: ... - def set_pre_input_hook(__function: Callable[[], object] | None = ...) -> None: ... - def set_completer(__function: _Completer | None = ...) -> None: ... + def set_startup_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_pre_input_hook(__function: Callable[[], object] | None = None) -> None: ... + def set_completer(__function: _Completer | None = None) -> None: ... def get_completer() -> _Completer | None: ... def get_completion_type() -> int: ... def get_begidx() -> int: ... def get_endidx() -> int: ... def set_completer_delims(__string: str) -> None: ... def get_completer_delims() -> str: ... - def set_completion_display_matches_hook(__function: _CompDisp | None = ...) -> None: ... + def set_completion_display_matches_hook(__function: _CompDisp | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/reprlib.pyi b/mypy/typeshed/stdlib/reprlib.pyi index d5554344c494..21c8a5cd4e0c 100644 --- a/mypy/typeshed/stdlib/reprlib.pyi +++ b/mypy/typeshed/stdlib/reprlib.pyi @@ -8,7 +8,7 @@ __all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc: TypeAlias = Callable[[Any], str] -def recursive_repr(fillvalue: str = ...) -> Callable[[_ReprFunc], _ReprFunc]: ... +def recursive_repr(fillvalue: str = "...") -> Callable[[_ReprFunc], _ReprFunc]: ... class Repr: maxlevel: int @@ -22,7 +22,6 @@ class Repr: maxlong: int maxstring: int maxother: int - def __init__(self) -> None: ... def repr(self, x: Any) -> str: ... 
def repr1(self, x: Any, level: int) -> str: ... def repr_tuple(self, x: tuple[Any, ...], level: int) -> str: ... diff --git a/mypy/typeshed/stdlib/rlcompleter.pyi b/mypy/typeshed/stdlib/rlcompleter.pyi index 1840b7cfced7..8d9477e3ee45 100644 --- a/mypy/typeshed/stdlib/rlcompleter.pyi +++ b/mypy/typeshed/stdlib/rlcompleter.pyi @@ -3,7 +3,7 @@ from typing import Any __all__ = ["Completer"] class Completer: - def __init__(self, namespace: dict[str, Any] | None = ...) -> None: ... + def __init__(self, namespace: dict[str, Any] | None = None) -> None: ... def complete(self, text: str, state: int) -> str | None: ... def attr_matches(self, text: str) -> list[str]: ... def global_matches(self, text: str) -> list[str]: ... diff --git a/mypy/typeshed/stdlib/runpy.pyi b/mypy/typeshed/stdlib/runpy.pyi index 256f8dab14e9..d4406ea4ac41 100644 --- a/mypy/typeshed/stdlib/runpy.pyi +++ b/mypy/typeshed/stdlib/runpy.pyi @@ -1,6 +1,7 @@ -from _typeshed import Self +from _typeshed import Unused from types import ModuleType from typing import Any +from typing_extensions import Self __all__ = ["run_module", "run_path"] @@ -8,16 +9,16 @@ class _TempModule: mod_name: str module: ModuleType def __init__(self, mod_name: str) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... class _ModifiedArgv0: value: Any def __init__(self, value: Any) -> None: ... def __enter__(self) -> None: ... - def __exit__(self, *args: object) -> None: ... + def __exit__(self, *args: Unused) -> None: ... def run_module( - mod_name: str, init_globals: dict[str, Any] | None = ..., run_name: str | None = ..., alter_sys: bool = ... + mod_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None, alter_sys: bool = False ) -> dict[str, Any]: ... -def run_path(path_name: str, init_globals: dict[str, Any] | None = ..., run_name: str | None = ...) -> dict[str, Any]: ... +def run_path(path_name: str, init_globals: dict[str, Any] | None = None, run_name: str | None = None) -> dict[str, Any]: ... diff --git a/mypy/typeshed/stdlib/sched.pyi b/mypy/typeshed/stdlib/sched.pyi index 29c84f951124..a8ec78d68fd2 100644 --- a/mypy/typeshed/stdlib/sched.pyi +++ b/mypy/typeshed/stdlib/sched.pyi @@ -35,7 +35,7 @@ class scheduler: def enter( self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... ) -> Event: ... - def run(self, blocking: bool = ...) -> float | None: ... + def run(self, blocking: bool = True) -> float | None: ... def cancel(self, event: Event) -> None: ... def empty(self) -> bool: ... @property diff --git a/mypy/typeshed/stdlib/secrets.pyi b/mypy/typeshed/stdlib/secrets.pyi index 99b7c14ebafc..4861b6f09340 100644 --- a/mypy/typeshed/stdlib/secrets.pyi +++ b/mypy/typeshed/stdlib/secrets.pyi @@ -10,6 +10,6 @@ _T = TypeVar("_T") def randbelow(exclusive_upper_bound: int) -> int: ... def randbits(k: int) -> int: ... def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... -def token_bytes(nbytes: int | None = ...) -> bytes: ... -def token_hex(nbytes: int | None = ...) -> str: ... -def token_urlsafe(nbytes: int | None = ...) -> str: ... +def token_bytes(nbytes: int | None = None) -> bytes: ... +def token_hex(nbytes: int | None = None) -> str: ... +def token_urlsafe(nbytes: int | None = None) -> str: ... 
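The `re` stub changes above give `Match.group` an explicit `Literal[0] = 0` default, which is why `group()` with no argument is typed as always returning the full match while a numbered capturing group may be `None`. A small illustration under that reading; the pattern and strings are made up for the example:

```python
import re

# "(\w+)" always participates in a match; "(\w+)?" may not.
m = re.search(r"(\w+) ?(\w+)?", "hello")
assert m is not None

whole = m.group()    # group() == group(0): always the full match, typed str
first = m.group(1)   # a numbered group is typed str | Any in the stub
second = m.group(2)  # this optional group did not participate -> None

print(repr(whole), repr(first), repr(second))  # 'hello' 'hello' None
```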
diff --git a/mypy/typeshed/stdlib/select.pyi b/mypy/typeshed/stdlib/select.pyi index 7cfea9ea0fc1..412fd71ee38d 100644 --- a/mypy/typeshed/stdlib/select.pyi +++ b/mypy/typeshed/stdlib/select.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import FileDescriptorLike, Self +from _typeshed import FileDescriptorLike from collections.abc import Iterable from types import TracebackType from typing import Any -from typing_extensions import final +from typing_extensions import Self, final if sys.platform != "win32": PIPE_BUF: int @@ -21,14 +21,13 @@ if sys.platform != "win32": POLLWRNORM: int class poll: - def __init__(self) -> None: ... def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... def select( - __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = ... + __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = None ) -> tuple[list[Any], list[Any], list[Any]]: ... error = OSError @@ -59,7 +58,7 @@ if sys.platform != "linux" and sys.platform != "win32": def __init__(self) -> None: ... def close(self) -> None: ... def control( - self, __changelist: Iterable[kevent] | None, __maxevents: int, __timeout: float | None = ... + self, __changelist: Iterable[kevent] | None, __maxevents: int, __timeout: float | None = None ) -> list[kevent]: ... def fileno(self) -> int: ... @classmethod @@ -107,12 +106,12 @@ if sys.platform == "linux": @final class epoll: def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, - __exc_type: type[BaseException] | None = ..., + __exc_type: type[BaseException] | None = None, __exc_val: BaseException | None = ..., - __exc_tb: TracebackType | None = ..., + __exc_tb: TracebackType | None = None, ) -> None: ... def close(self) -> None: ... closed: bool @@ -120,7 +119,7 @@ if sys.platform == "linux": def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = ..., maxevents: int = ...) -> list[tuple[int, int]]: ... + def poll(self, timeout: float | None = None, maxevents: int = -1) -> list[tuple[int, int]]: ... @classmethod def fromfd(cls, __fd: FileDescriptorLike) -> epoll: ... EPOLLERR: int diff --git a/mypy/typeshed/stdlib/selectors.pyi b/mypy/typeshed/stdlib/selectors.pyi index 95dfaa41a5c0..90a923f09355 100644 --- a/mypy/typeshed/stdlib/selectors.pyi +++ b/mypy/typeshed/stdlib/selectors.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import FileDescriptor, FileDescriptorLike, Self +from _typeshed import FileDescriptor, FileDescriptorLike, Unused from abc import ABCMeta, abstractmethod from collections.abc import Mapping from typing import Any, NamedTuple -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias _EventMask: TypeAlias = int @@ -18,38 +18,38 @@ class SelectorKey(NamedTuple): class BaseSelector(metaclass=ABCMeta): @abstractmethod - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... 
+ def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... @abstractmethod - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def close(self) -> None: ... def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... @abstractmethod def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... class SelectSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform != "win32": class PollSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform == "linux": class EpollSelector(BaseSelector): def fileno(self) -> int: ... - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class DevpollSelector(BaseSelector): @@ -61,13 +61,13 @@ class DevpollSelector(BaseSelector): class KqueueSelector(BaseSelector): def fileno(self) -> int: ... - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... 
class DefaultSelector(BaseSelector): - def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... diff --git a/mypy/typeshed/stdlib/shelve.pyi b/mypy/typeshed/stdlib/shelve.pyi index c801ecd3f186..82d0b03f4049 100644 --- a/mypy/typeshed/stdlib/shelve.pyi +++ b/mypy/typeshed/stdlib/shelve.pyi @@ -1,8 +1,8 @@ -from _typeshed import Self from collections.abc import Iterator, MutableMapping from dbm import _TFlags from types import TracebackType from typing import Any, TypeVar, overload +from typing_extensions import Self __all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] @@ -11,7 +11,7 @@ _VT = TypeVar("_VT") class Shelf(MutableMapping[str, _VT]): def __init__( - self, dict: MutableMapping[bytes, bytes], protocol: int | None = ..., writeback: bool = ..., keyencoding: str = ... + self, dict: MutableMapping[bytes, bytes], protocol: int | None = None, writeback: bool = False, keyencoding: str = "utf-8" ) -> None: ... def __iter__(self) -> Iterator[str]: ... def __len__(self) -> int: ... @@ -23,7 +23,7 @@ class Shelf(MutableMapping[str, _VT]): def __setitem__(self, key: str, value: _VT) -> None: ... def __delitem__(self, key: str) -> None: ... def __contains__(self, key: str) -> bool: ... # type: ignore[override] - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... @@ -38,6 +38,6 @@ class BsdDbShelf(Shelf[_VT]): def last(self) -> tuple[str, _VT]: ... class DbfilenameShelf(Shelf[_VT]): - def __init__(self, filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) -> None: ... + def __init__(self, filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> None: ... -def open(filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) -> Shelf[Any]: ... +def open(filename: str, flag: _TFlags = "c", protocol: int | None = None, writeback: bool = False) -> Shelf[Any]: ... diff --git a/mypy/typeshed/stdlib/shlex.pyi b/mypy/typeshed/stdlib/shlex.pyi index f9d660594a5a..fa04932db676 100644 --- a/mypy/typeshed/stdlib/shlex.pyi +++ b/mypy/typeshed/stdlib/shlex.pyi @@ -1,14 +1,14 @@ import sys -from _typeshed import Self from collections.abc import Iterable from typing import TextIO +from typing_extensions import Self if sys.version_info >= (3, 8): __all__ = ["shlex", "split", "quote", "join"] else: __all__ = ["shlex", "split", "quote"] -def split(s: str, comments: bool = ..., posix: bool = ...) -> list[str]: ... +def split(s: str, comments: bool = False, posix: bool = True) -> list[str]: ... if sys.version_info >= (3, 8): def join(split_command: Iterable[str]) -> str: ... @@ -34,17 +34,17 @@ class shlex(Iterable[str]): def punctuation_chars(self) -> str: ... 
def __init__( self, - instream: str | TextIO | None = ..., - infile: str | None = ..., - posix: bool = ..., - punctuation_chars: bool | str = ..., + instream: str | TextIO | None = None, + infile: str | None = None, + posix: bool = False, + punctuation_chars: bool | str = False, ) -> None: ... def get_token(self) -> str: ... def push_token(self, tok: str) -> None: ... def read_token(self) -> str: ... def sourcehook(self, newfile: str) -> tuple[str, TextIO]: ... - def push_source(self, newstream: str | TextIO, newfile: str | None = ...) -> None: ... + def push_source(self, newstream: str | TextIO, newfile: str | None = None) -> None: ... def pop_source(self) -> None: ... - def error_leader(self, infile: str | None = ..., lineno: int | None = ...) -> None: ... - def __iter__(self: Self) -> Self: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> None: ... + def __iter__(self) -> Self: ... def __next__(self) -> str: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 13c706de1cf4..0e4f521e5e34 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -1,8 +1,8 @@ import os import sys -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from _typeshed import BytesPath, FileDescriptorOrPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite from collections.abc import Callable, Iterable, Sequence -from typing import Any, AnyStr, NamedTuple, TypeVar, overload +from typing import Any, AnyStr, NamedTuple, Protocol, TypeVar, overload from typing_extensions import TypeAlias __all__ = [ @@ -47,50 +47,64 @@ class ExecError(OSError): ... class ReadError(OSError): ... class RegistryError(Exception): ... -def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = ...) -> None: ... -def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = ...) -> _StrOrBytesPathT: ... -def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... -def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... +if sys.version_info >= (3, 8): + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 0) -> None: ... + +else: + def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = 16384) -> None: ... + +def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = True) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = True) -> None: ... @overload -def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = True) -> _PathReturn: ... @overload -def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = True) -> _PathReturn: ... 
def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... if sys.version_info >= (3, 8): def copytree( src: StrPath, dst: StrPath, - symlinks: bool = ..., - ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = ..., + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, copy_function: Callable[[str, str], object] = ..., - ignore_dangling_symlinks: bool = ..., - dirs_exist_ok: bool = ..., + ignore_dangling_symlinks: bool = False, + dirs_exist_ok: bool = False, ) -> _PathReturn: ... else: def copytree( src: StrPath, dst: StrPath, - symlinks: bool = ..., - ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = ..., + symlinks: bool = False, + ignore: None | Callable[[str, list[str]], Iterable[str]] | Callable[[StrPath, list[str]], Iterable[str]] = None, copy_function: Callable[[str, str], object] = ..., - ignore_dangling_symlinks: bool = ..., + ignore_dangling_symlinks: bool = False, ) -> _PathReturn: ... _OnErrorCallback: TypeAlias = Callable[[Callable[..., Any], Any, Any], object] -if sys.version_info >= (3, 11): - def rmtree( - path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ..., *, dir_fd: int | None = ... - ) -> None: ... +class _RmtreeType(Protocol): + avoids_symlink_attacks: bool + if sys.version_info >= (3, 11): + def __call__( + self, + path: StrOrBytesPath, + ignore_errors: bool = ..., + onerror: _OnErrorCallback | None = ..., + *, + dir_fd: int | None = ..., + ) -> None: ... -else: - def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... + else: + def __call__(self, path: StrOrBytesPath, ignore_errors: bool = ..., onerror: _OnErrorCallback | None = ...) -> None: ... + +rmtree: _RmtreeType _CopyFn: TypeAlias = Callable[[str, str], object] | Callable[[StrPath, StrPath], object] @@ -109,15 +123,15 @@ class _ntuple_diskusage(NamedTuple): used: int free: int -def disk_usage(path: int | StrOrBytesPath) -> _ntuple_diskusage: ... +def disk_usage(path: FileDescriptorOrPath) -> _ntuple_diskusage: ... # While chown can be imported on Windows, it doesn't actually work; # see https://bugs.python.org/issue33140. We keep it here because it's # in __all__. @overload -def chown(path: StrOrBytesPath, user: str | int, group: None = ...) -> None: ... +def chown(path: StrOrBytesPath, user: str | int, group: None = None) -> None: ... @overload -def chown(path: StrOrBytesPath, user: None = ..., *, group: str | int) -> None: ... +def chown(path: StrOrBytesPath, user: None = None, *, group: str | int) -> None: ... @overload def chown(path: StrOrBytesPath, user: None, group: str | int) -> None: ... @overload @@ -125,46 +139,46 @@ def chown(path: StrOrBytesPath, user: str | int, group: str | int) -> None: ... if sys.version_info >= (3, 8): @overload - def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... + def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... @overload - def which(cmd: bytes, mode: int = ..., path: StrPath | None = ...) -> bytes | None: ... + def which(cmd: bytes, mode: int = 1, path: StrPath | None = None) -> bytes | None: ... else: - def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... 
+ def which(cmd: _StrPathT, mode: int = 1, path: StrPath | None = None) -> str | _StrPathT | None: ... def make_archive( base_name: str, format: str, - root_dir: StrPath | None = ..., - base_dir: StrPath | None = ..., + root_dir: StrPath | None = None, + base_dir: StrPath | None = None, verbose: bool = ..., dry_run: bool = ..., - owner: str | None = ..., - group: str | None = ..., - logger: Any | None = ..., + owner: str | None = None, + group: str | None = None, + logger: Any | None = None, ) -> str: ... def get_archive_formats() -> list[tuple[str, str]]: ... @overload def register_archive_format( - name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = ... + name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = "" ) -> None: ... @overload def register_archive_format( - name: str, function: Callable[[str, str], object], extra_args: None = ..., description: str = ... + name: str, function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... def unregister_archive_format(name: str) -> None: ... -def unpack_archive(filename: StrPath, extract_dir: StrPath | None = ..., format: str | None = ...) -> None: ... +def unpack_archive(filename: StrPath, extract_dir: StrPath | None = None, format: str | None = None) -> None: ... @overload def register_unpack_format( name: str, extensions: list[str], function: Callable[..., object], extra_args: Sequence[tuple[str, Any]], - description: str = ..., + description: str = "", ) -> None: ... @overload def register_unpack_format( - name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = ..., description: str = ... + name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = None, description: str = "" ) -> None: ... def unregister_unpack_format(name: str) -> None: ... def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index 8e9bd990a2c2..e411d47016b6 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -3,7 +3,7 @@ from _typeshed import structseq from collections.abc import Callable, Iterable from enum import IntEnum from types import FrameType -from typing import Any, Union +from typing import Any from typing_extensions import Final, Never, TypeAlias, final NSIG: int @@ -62,7 +62,7 @@ SIG_DFL: Handlers SIG_IGN: Handlers _SIGNUM: TypeAlias = int | Signals -_HANDLER: TypeAlias = Union[Callable[[int, FrameType | None], Any], int, Handlers, None] +_HANDLER: TypeAlias = Callable[[int, FrameType | None], Any] | int | Handlers | None def default_int_handler(__signalnum: int, __frame: FrameType | None) -> Never: ... @@ -113,7 +113,7 @@ else: SIGXCPU: Signals SIGXFSZ: Signals - class ItimerError(IOError): ... + class ItimerError(OSError): ... ITIMER_PROF: int ITIMER_REAL: int ITIMER_VIRTUAL: int @@ -134,7 +134,7 @@ else: else: def pthread_sigmask(__how: int, __mask: Iterable[int]) -> set[_SIGNUM]: ... - def setitimer(__which: int, __seconds: float, __interval: float = ...) -> tuple[float, float]: ... + def setitimer(__which: int, __seconds: float, __interval: float = 0.0) -> tuple[float, float]: ... def siginterrupt(__signalnum: int, __flag: bool) -> None: ... def sigpending() -> Any: ... 
if sys.version_info >= (3, 10): # argument changed in 3.10.2 @@ -178,4 +178,4 @@ def set_wakeup_fd(fd: int, *, warn_on_full_buffer: bool = ...) -> int: ... if sys.version_info >= (3, 9): if sys.platform == "linux": - def pidfd_send_signal(__pidfd: int, __sig: int, __siginfo: None = ..., __flags: int = ...) -> None: ... + def pidfd_send_signal(__pidfd: int, __sig: int, __siginfo: None = None, __flags: int = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/site.pyi b/mypy/typeshed/stdlib/site.pyi index 53199db0eaf3..a8c6bcb417f4 100644 --- a/mypy/typeshed/stdlib/site.pyi +++ b/mypy/typeshed/stdlib/site.pyi @@ -9,14 +9,14 @@ USER_BASE: str | None def main() -> None: ... def abs_paths() -> None: ... # undocumented def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented -def addsitedir(sitedir: str, known_paths: set[str] | None = ...) -> None: ... -def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = ...) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = None) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = None) -> set[str] | None: ... # undocumented def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented def check_enableusersite() -> bool | None: ... # undocumented def enablerlcompleter() -> None: ... # undocumented def execsitecustomize() -> None: ... # undocumented def execusercustomize() -> None: ... # undocumented -def getsitepackages(prefixes: Iterable[str] | None = ...) -> list[str]: ... +def getsitepackages(prefixes: Iterable[str] | None = None) -> list[str]: ... def getuserbase() -> str: ... def getusersitepackages() -> str: ... def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented diff --git a/mypy/typeshed/stdlib/smtpd.pyi b/mypy/typeshed/stdlib/smtpd.pyi index f2de6c155c07..7392bd51627d 100644 --- a/mypy/typeshed/stdlib/smtpd.pyi +++ b/mypy/typeshed/stdlib/smtpd.pyi @@ -41,10 +41,10 @@ class SMTPChannel(asynchat.async_chat): server: SMTPServer, conn: socket.socket, addr: Any, - data_size_limit: int = ..., - map: asyncore._MapType | None = ..., - enable_SMTPUTF8: bool = ..., - decode_data: bool = ..., + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, ) -> None: ... # base asynchat.async_chat.push() accepts bytes def push(self, msg: str) -> None: ... # type: ignore[override] @@ -71,10 +71,10 @@ class SMTPServer(asyncore.dispatcher): self, localaddr: _Address, remoteaddr: _Address, - data_size_limit: int = ..., - map: asyncore._MapType | None = ..., - enable_SMTPUTF8: bool = ..., - decode_data: bool = ..., + data_size_limit: int = 33554432, + map: asyncore._MapType | None = None, + enable_SMTPUTF8: bool = False, + decode_data: bool = False, ) -> None: ... def handle_accepted(self, conn: socket.socket, addr: Any) -> None: ... 
def process_message( diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index c42841c43e7f..0d7595fc1d6d 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -1,5 +1,6 @@ import sys -from _typeshed import Self +from _socket import _Address as _SourceAddress +from _typeshed import ReadableBuffer, _BufferWithLen from collections.abc import Sequence from email.message import Message as _Message from re import Pattern @@ -7,7 +8,7 @@ from socket import socket from ssl import SSLContext from types import TracebackType from typing import Any, Protocol, overload -from typing_extensions import TypeAlias +from typing_extensions import Self, TypeAlias __all__ = [ "SMTPException", @@ -28,8 +29,6 @@ __all__ = [ _Reply: TypeAlias = tuple[int, bytes] _SendErrs: TypeAlias = dict[str, _Reply] -# Should match source_address for socket.create_connection -_SourceAddress: TypeAlias = tuple[bytearray | bytes | str, int] SMTP_PORT: int SMTP_SSL_PORT: int @@ -49,7 +48,6 @@ class SMTPResponseException(SMTPException): def __init__(self, code: int, msg: bytes | str) -> None: ... class SMTPSenderRefused(SMTPResponseException): - smtp_code: int smtp_error: bytes sender: str args: tuple[int, bytes, str] @@ -70,7 +68,7 @@ def quotedata(data: str) -> str: ... class _AuthObject(Protocol): @overload - def __call__(self, challenge: None = ...) -> str | None: ... + def __call__(self, challenge: None = None) -> str | None: ... @overload def __call__(self, challenge: bytes) -> str: ... @@ -91,59 +89,59 @@ class SMTP: local_hostname: str def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., + host: str = "", + port: int = 0, + local_hostname: str | None = None, timeout: float = ..., - source_address: _SourceAddress | None = ..., + source_address: _SourceAddress | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... - def connect(self, host: str = ..., port: int = ..., source_address: _SourceAddress | None = ...) -> _Reply: ... - def send(self, s: bytes | str) -> None: ... - def putcmd(self, cmd: str, args: str = ...) -> None: ... + def connect(self, host: str = "localhost", port: int = 0, source_address: _SourceAddress | None = None) -> _Reply: ... + def send(self, s: ReadableBuffer | str) -> None: ... + def putcmd(self, cmd: str, args: str = "") -> None: ... def getreply(self) -> _Reply: ... - def docmd(self, cmd: str, args: str = ...) -> _Reply: ... - def helo(self, name: str = ...) -> _Reply: ... - def ehlo(self, name: str = ...) -> _Reply: ... + def docmd(self, cmd: str, args: str = "") -> _Reply: ... + def helo(self, name: str = "") -> _Reply: ... + def ehlo(self, name: str = "") -> _Reply: ... def has_extn(self, opt: str) -> bool: ... - def help(self, args: str = ...) -> bytes: ... + def help(self, args: str = "") -> bytes: ... def rset(self) -> _Reply: ... def noop(self) -> _Reply: ... def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... - def data(self, msg: bytes | str) -> _Reply: ... + def data(self, msg: ReadableBuffer | str) -> _Reply: ... def verify(self, address: str) -> _Reply: ... vrfy = verify def expn(self, address: str) -> _Reply: ... def ehlo_or_helo_if_needed(self) -> None: ... 
user: str password: str - def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = ...) -> _Reply: ... + def auth(self, mechanism: str, authobject: _AuthObject, *, initial_response_ok: bool = True) -> _Reply: ... @overload - def auth_cram_md5(self, challenge: None = ...) -> None: ... + def auth_cram_md5(self, challenge: None = None) -> None: ... @overload - def auth_cram_md5(self, challenge: bytes) -> str: ... - def auth_plain(self, challenge: bytes | None = ...) -> str: ... - def auth_login(self, challenge: bytes | None = ...) -> str: ... - def login(self, user: str, password: str, *, initial_response_ok: bool = ...) -> _Reply: ... - def starttls(self, keyfile: str | None = ..., certfile: str | None = ..., context: SSLContext | None = ...) -> _Reply: ... + def auth_cram_md5(self, challenge: ReadableBuffer) -> str: ... + def auth_plain(self, challenge: ReadableBuffer | None = None) -> str: ... + def auth_login(self, challenge: ReadableBuffer | None = None) -> str: ... + def login(self, user: str, password: str, *, initial_response_ok: bool = True) -> _Reply: ... + def starttls(self, keyfile: str | None = None, certfile: str | None = None, context: SSLContext | None = None) -> _Reply: ... def sendmail( self, from_addr: str, to_addrs: str | Sequence[str], - msg: bytes | str, + msg: _BufferWithLen | str, mail_options: Sequence[str] = ..., rcpt_options: Sequence[str] = ..., ) -> _SendErrs: ... def send_message( self, msg: _Message, - from_addr: str | None = ..., - to_addrs: str | Sequence[str] | None = ..., + from_addr: str | None = None, + to_addrs: str | Sequence[str] | None = None, mail_options: Sequence[str] = ..., rcpt_options: Sequence[str] = ..., ) -> _SendErrs: ... @@ -151,20 +149,19 @@ class SMTP: def quit(self) -> _Reply: ... class SMTP_SSL(SMTP): - default_port: int keyfile: str | None certfile: str | None context: SSLContext def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., - keyfile: str | None = ..., - certfile: str | None = ..., + host: str = "", + port: int = 0, + local_hostname: str | None = None, + keyfile: str | None = None, + certfile: str | None = None, timeout: float = ..., - source_address: _SourceAddress | None = ..., - context: SSLContext | None = ..., + source_address: _SourceAddress | None = None, + context: SSLContext | None = None, ) -> None: ... LMTP_PORT: int @@ -173,13 +170,17 @@ class LMTP(SMTP): if sys.version_info >= (3, 9): def __init__( self, - host: str = ..., - port: int = ..., - local_hostname: str | None = ..., - source_address: _SourceAddress | None = ..., + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, timeout: float = ..., ) -> None: ... else: def __init__( - self, host: str = ..., port: int = ..., local_hostname: str | None = ..., source_address: _SourceAddress | None = ... + self, + host: str = "", + port: int = 2003, + local_hostname: str | None = None, + source_address: _SourceAddress | None = None, ) -> None: ... 
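For context, a brief usage sketch of the smtplib API as the updated stub above describes it: the signatures now spell out literal defaults (`host=""`, `port=0`, `args=""`) and `__enter__` returns `typing_extensions.Self`. The sketch is illustrative only and is not part of the patch; the address `localhost:8025` is a placeholder and assumes a local debugging SMTP server (for example, aiosmtpd) is listening there.

```python
# Illustrative only (not part of the patch): exercises the smtplib
# signatures annotated above. Assumes a local debugging SMTP server
# is listening on localhost:8025.
import smtplib


def send_plain_text() -> None:
    # __enter__ -> Self, so `smtp` is typed as SMTP inside the block.
    with smtplib.SMTP("localhost", 8025) as smtp:
        smtp.ehlo()  # helo/ehlo default to name="" per the stub
        smtp.sendmail(  # msg may be a str or a bytes buffer
            "sender@example.com",
            ["recipient@example.com"],
            b"Subject: hello\r\n\r\nHi there!\r\n",
        )


if __name__ == "__main__":
    send_plain_text()
```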
diff --git a/mypy/typeshed/stdlib/socket.pyi b/mypy/typeshed/stdlib/socket.pyi index a0f5708bf806..dbc1d46ec1d4 100644 --- a/mypy/typeshed/stdlib/socket.pyi +++ b/mypy/typeshed/stdlib/socket.pyi @@ -1,15 +1,8 @@ -import sys -from _typeshed import ReadableBuffer, Self, WriteableBuffer -from collections.abc import Iterable -from enum import IntEnum, IntFlag -from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper -from typing import Any, Protocol, overload -from typing_extensions import Literal - # Ideally, we'd just do "from _socket import *". Unfortunately, socket # overrides some definitions from _socket incompatibly. mypy incorrectly # prefers the definitions from _socket over those defined here. import _socket +import sys from _socket import ( _FD, EAI_AGAIN as EAI_AGAIN, @@ -119,6 +112,12 @@ from _socket import ( setdefaulttimeout as setdefaulttimeout, timeout as timeout, ) +from _typeshed import ReadableBuffer, Unused, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self if sys.platform != "darwin" or sys.version_info >= (3, 9): from _socket import ( @@ -297,6 +296,20 @@ if sys.platform == "linux": CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, CAN_RTR_FLAG as CAN_RTR_FLAG, CAN_SFF_MASK as CAN_SFF_MASK, + NETLINK_ARPD as NETLINK_ARPD, + NETLINK_CRYPTO as NETLINK_CRYPTO, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_ROUTE6 as NETLINK_ROUTE6, + NETLINK_SKIP as NETLINK_SKIP, + NETLINK_TAPBASE as NETLINK_TAPBASE, + NETLINK_TCPDIAG as NETLINK_TCPDIAG, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_W1 as NETLINK_W1, + NETLINK_XFRM as NETLINK_XFRM, PACKET_BROADCAST as PACKET_BROADCAST, PACKET_FASTROUTE as PACKET_FASTROUTE, PACKET_HOST as PACKET_HOST, @@ -642,11 +655,11 @@ class _SendableFile(Protocol): class socket(_socket.socket): def __init__( - self, family: AddressFamily | int = ..., type: SocketKind | int = ..., proto: int = ..., fileno: int | None = ... + self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None ) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... - def dup(self: Self) -> Self: ... # noqa: F811 + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def dup(self) -> Self: ... # noqa: F811 def accept(self) -> tuple[socket, _RetAddress]: ... # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @@ -656,39 +669,39 @@ class socket(_socket.socket): mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], buffering: Literal[0], *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> SocketIO: ... 
@overload def makefile( self, mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedRWPair: ... @overload def makefile( self, mode: Literal["rb", "br"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedReader: ... @overload def makefile( self, mode: Literal["wb", "bw"], - buffering: Literal[-1, 1] | None = ..., + buffering: Literal[-1, 1] | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> BufferedWriter: ... @overload def makefile( @@ -696,21 +709,21 @@ class socket(_socket.socket): mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], buffering: int, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> IOBase: ... @overload def makefile( self, - mode: Literal["r", "w", "rw", "wr", ""] = ..., - buffering: int | None = ..., + mode: Literal["r", "w", "rw", "wr", ""] = "r", + buffering: int | None = None, *, - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, ) -> TextIOWrapper: ... - def sendfile(self, file: _SendableFile, offset: int = ..., count: int | None = ...) -> int: ... + def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... @property def family(self) -> AddressFamily: ... # type: ignore[override] @property @@ -718,25 +731,24 @@ class socket(_socket.socket): def get_inheritable(self) -> bool: ... def set_inheritable(self, inheritable: bool) -> None: ... -def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = ...) -> socket: ... +def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... if sys.platform != "win32": if sys.version_info >= (3, 9): - # flags and address appear to be unused in send_fds and recv_fds def send_fds( - sock: socket, buffers: Iterable[bytes], fds: bytes | Iterable[int], flags: int = ..., address: None = ... + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None ) -> int: ... - def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = ...) -> tuple[bytes, list[int], int, Any]: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... if sys.platform == "win32": def fromshare(info: bytes) -> socket: ... if sys.platform == "win32": - def socketpair(family: int = ..., type: int = ..., proto: int = ...) -> tuple[socket, socket]: ... + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... else: def socketpair( - family: int | AddressFamily | None = ..., type: SocketType | int = ..., proto: int = ... 
+ family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 ) -> tuple[socket, socket]: ... class SocketIO(RawIOBase): @@ -748,31 +760,34 @@ class SocketIO(RawIOBase): @property def mode(self) -> Literal["rb", "wb", "rwb"]: ... -def getfqdn(name: str = ...) -> str: ... +def getfqdn(name: str = "") -> str: ... if sys.version_info >= (3, 11): def create_connection( address: tuple[str | None, int], timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + source_address: _Address | None = None, *, - all_errors: bool = ..., + all_errors: bool = False, ) -> socket: ... else: def create_connection( - address: tuple[str | None, int], - timeout: float | None = ..., # noqa: F811 - source_address: tuple[bytearray | bytes | str, int] | None = ..., + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None # noqa: F811 ) -> socket: ... if sys.version_info >= (3, 8): def has_dualstack_ipv6() -> bool: ... def create_server( - address: _Address, *, family: int = ..., backlog: int | None = ..., reuse_port: bool = ..., dualstack_ipv6: bool = ... + address: _Address, + *, + family: int = ..., + backlog: int | None = None, + reuse_port: bool = False, + dualstack_ipv6: bool = False, ) -> socket: ... # the 5th tuple item is an address def getaddrinfo( - host: bytes | str | None, port: str | int | None, family: int = ..., type: int = ..., proto: int = ..., flags: int = ... + host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... diff --git a/mypy/typeshed/stdlib/socketserver.pyi b/mypy/typeshed/stdlib/socketserver.pyi index f1d127ebe6a1..3f0bb0eea0ce 100644 --- a/mypy/typeshed/stdlib/socketserver.pyi +++ b/mypy/typeshed/stdlib/socketserver.pyi @@ -1,10 +1,11 @@ import sys import types -from _typeshed import Self +from _socket import _Address, _RetAddress +from _typeshed import ReadableBuffer from collections.abc import Callable from socket import socket as _socket -from typing import Any, BinaryIO, ClassVar, Union -from typing_extensions import TypeAlias +from typing import Any, BinaryIO, ClassVar +from typing_extensions import Self, TypeAlias __all__ = [ "BaseServer", @@ -28,40 +29,41 @@ if sys.platform != "win32": "UnixStreamServer", ] -_RequestType: TypeAlias = Union[_socket, tuple[bytes, _socket]] -_AddressType: TypeAlias = Union[tuple[str, int], str] +_RequestType: TypeAlias = _socket | tuple[bytes, _socket] +_AfUnixAddress: TypeAlias = str | ReadableBuffer # adddress acceptable for an AF_UNIX socket +_AfInetAddress: TypeAlias = tuple[str | bytes | bytearray, int] # address acceptable for an AF_INET socket # This can possibly be generic at some point: class BaseServer: address_family: int - server_address: tuple[str, int] + server_address: _Address socket: _socket allow_reuse_address: bool request_queue_size: int socket_type: int timeout: float | None def __init__( - self: Self, server_address: Any, RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler] + self, server_address: _Address, RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler] ) -> None: ... # It is not actually a `@property`, but we need a `Self` type: @property - def RequestHandlerClass(self: Self) -> Callable[[Any, Any, Self], BaseRequestHandler]: ... 
+ def RequestHandlerClass(self) -> Callable[[Any, _RetAddress, Self], BaseRequestHandler]: ... @RequestHandlerClass.setter - def RequestHandlerClass(self: Self, val: Callable[[Any, Any, Self], BaseRequestHandler]) -> None: ... + def RequestHandlerClass(self, val: Callable[[Any, _RetAddress, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... - def serve_forever(self, poll_interval: float = ...) -> None: ... + def serve_forever(self, poll_interval: float = 0.5) -> None: ... def shutdown(self) -> None: ... def server_close(self) -> None: ... - def finish_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def finish_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def get_request(self) -> tuple[Any, Any]: ... - def handle_error(self, request: _RequestType, client_address: _AddressType) -> None: ... + def handle_error(self, request: _RequestType, client_address: _RetAddress) -> None: ... def handle_timeout(self) -> None: ... - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_activate(self) -> None: ... def server_bind(self) -> None: ... - def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... - def __enter__(self: Self) -> Self: ... + def verify_request(self, request: _RequestType, client_address: _RetAddress) -> bool: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... @@ -70,37 +72,38 @@ class BaseServer: def close_request(self, request: _RequestType) -> None: ... # undocumented class TCPServer(BaseServer): - allow_reuse_port: bool - request_queue_size: int + if sys.version_info >= (3, 11): + allow_reuse_port: bool + server_address: _AfInetAddress # type: ignore[assignment] def __init__( - self: Self, - server_address: tuple[str, int], - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + self, + server_address: _AfInetAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, ) -> None: ... - def get_request(self) -> tuple[_socket, Any]: ... + def get_request(self) -> tuple[_socket, _RetAddress]: ... -class UDPServer(BaseServer): - if sys.version_info >= (3, 11): - allow_reuse_port: bool +class UDPServer(TCPServer): max_packet_size: ClassVar[int] - def get_request(self) -> tuple[tuple[bytes, _socket], Any]: ... + def get_request(self) -> tuple[tuple[bytes, _socket], _RetAddress]: ... # type: ignore[override] if sys.platform != "win32": class UnixStreamServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( - self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, ) -> None: ... 
class UnixDatagramServer(BaseServer): + server_address: _AfUnixAddress # type: ignore[assignment] def __init__( - self: Self, - server_address: str | bytes, - RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], - bind_and_activate: bool = ..., + self, + server_address: _AfUnixAddress, + RequestHandlerClass: Callable[[Any, _RetAddress, Self], BaseRequestHandler], + bind_and_activate: bool = True, ) -> None: ... if sys.platform != "win32": @@ -109,17 +112,17 @@ if sys.platform != "win32": active_children: set[int] | None # undocumented max_children: int # undocumented block_on_close: bool - def collect_children(self, *, blocking: bool = ...) -> None: ... # undocumented + def collect_children(self, *, blocking: bool = False) -> None: ... # undocumented def handle_timeout(self) -> None: ... # undocumented def service_actions(self) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... class ThreadingMixIn: daemon_threads: bool block_on_close: bool - def process_request_thread(self, request: _RequestType, client_address: _AddressType) -> None: ... # undocumented - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... + def process_request_thread(self, request: _RequestType, client_address: _RetAddress) -> None: ... # undocumented + def process_request(self, request: _RequestType, client_address: _RetAddress) -> None: ... def server_close(self) -> None: ... if sys.platform != "win32": @@ -134,16 +137,16 @@ if sys.platform != "win32": class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): ... class BaseRequestHandler: - # Those are technically of types, respectively: - # * _RequestType - # * _AddressType - # But there are some concerns that having unions here would cause + # `request` is technically of type _RequestType, + # but there are some concerns that having a union here would cause # too much inconvenience to people using it (see # https://github.com/python/typeshed/pull/384#issuecomment-234649696) + # + # Note also that _RetAddress is also just an alias for `Any` request: Any - client_address: Any + client_address: _RetAddress server: BaseServer - def __init__(self, request: _RequestType, client_address: _AddressType, server: BaseServer) -> None: ... + def __init__(self, request: _RequestType, client_address: _RetAddress, server: BaseServer) -> None: ... def setup(self) -> None: ... def handle(self) -> None: ... def finish(self) -> None: ... 
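A minimal sketch of the socketserver API as re-annotated above: `TCPServer` takes an AF_INET `(host, port)` address plus a handler factory, and `BaseServer.__enter__` now returns `Self`, so the context-manager form is typed precisely. The echo handler and the one-shot serve function below are illustrative, not part of the patch.

```python
# Illustrative only (not part of the patch): a tiny echo server using the
# socketserver signatures annotated above.
import socketserver


class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self) -> None:
        # For a TCPServer, self.request is the connected socket (typed Any).
        data = self.request.recv(1024)
        if data:
            self.request.sendall(data)


def serve_one_request() -> None:
    # server_address is an _AfInetAddress: (host, port); port 0 picks a free port.
    with socketserver.TCPServer(("127.0.0.1", 0), EchoHandler) as server:
        server.timeout = 0.5      # BaseServer.timeout: float | None
        server.handle_request()   # handle at most one request, then return


if __name__ == "__main__":
    serve_one_request()
```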
diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index 83d2df1e6da9..26188445547e 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -1,11 +1,11 @@ import sqlite3 import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, SupportsLenAndGetItem +from _typeshed import Incomplete, ReadableBuffer, StrOrBytesPath, SupportsLenAndGetItem, Unused from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from datetime import date, datetime, time from types import TracebackType from typing import Any, Protocol, TypeVar, overload -from typing_extensions import Literal, SupportsIndex, TypeAlias, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final _T = TypeVar("_T") _CursorT = TypeVar("_CursorT", bound=Cursor) @@ -217,7 +217,7 @@ def enable_callback_tracebacks(__enable: bool) -> None: ... # takes a pos-or-keyword argument because there is a C wrapper def enable_shared_cache(enable: int) -> None: ... -if sys.version_info >= (3, 11): +if sys.version_info >= (3, 10): def register_adapter(__type: type[_T], __adapter: _Adapter[_T]) -> None: ... def register_converter(__typename: str, __converter: _Converter) -> None: ... @@ -227,9 +227,9 @@ else: if sys.version_info < (3, 8): class Cache: - def __init__(self, *args, **kwargs) -> None: ... - def display(self, *args, **kwargs) -> None: ... - def get(self, *args, **kwargs) -> None: ... + def __init__(self, *args: Incomplete, **kwargs: Unused) -> None: ... + def display(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... + def get(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... class _AggregateProtocol(Protocol): def step(self, __value: int) -> object: ... @@ -294,7 +294,7 @@ class Connection: ) -> None: ... def close(self) -> None: ... if sys.version_info >= (3, 11): - def blobopen(self, __table: str, __column: str, __row: int, *, readonly: bool = ..., name: str = ...) -> Blob: ... + def blobopen(self, __table: str, __column: str, __row: int, *, readonly: bool = False, name: str = "main") -> Blob: ... def commit(self) -> None: ... def create_aggregate(self, name: str, n_arg: int, aggregate_class: Callable[[], _AggregateProtocol]) -> None: ... @@ -318,13 +318,13 @@ class Connection: def create_collation(self, __name: str, __callback: Callable[[str, str], int | SupportsIndex] | None) -> None: ... if sys.version_info >= (3, 8): def create_function( - self, name: str, narg: int, func: Callable[..., _SqliteData], *, deterministic: bool = ... + self, name: str, narg: int, func: Callable[..., _SqliteData] | None, *, deterministic: bool = False ) -> None: ... else: - def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData]) -> None: ... + def create_function(self, name: str, num_params: int, func: Callable[..., _SqliteData] | None) -> None: ... @overload - def cursor(self, cursorClass: None = ...) -> Cursor: ... + def cursor(self, cursorClass: None = None) -> Cursor: ... @overload def cursor(self, cursorClass: Callable[[], _CursorT]) -> _CursorT: ... def execute(self, sql: str, parameters: _Parameters = ...) -> Cursor: ... @@ -346,19 +346,19 @@ class Connection: self, target: Connection, *, - pages: int = ..., - progress: Callable[[int, int, int], object] | None = ..., - name: str = ..., - sleep: float = ..., + pages: int = -1, + progress: Callable[[int, int, int], object] | None = None, + name: str = "main", + sleep: float = 0.25, ) -> None: ... 
if sys.version_info >= (3, 11): def setlimit(self, __category: int, __limit: int) -> int: ... def getlimit(self, __category: int) -> int: ... - def serialize(self, *, name: str = ...) -> bytes: ... - def deserialize(self, __data: ReadableBuffer, *, name: str = ...) -> None: ... + def serialize(self, *, name: str = "main") -> bytes: ... + def deserialize(self, __data: ReadableBuffer, *, name: str = "main") -> None: ... def __call__(self, __sql: str) -> _Statement: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, __type: type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None ) -> Literal[False]: ... @@ -377,17 +377,17 @@ class Cursor(Iterator[Any]): def rowcount(self) -> int: ... def __init__(self, __cursor: Connection) -> None: ... def close(self) -> None: ... - def execute(self: Self, __sql: str, __parameters: _Parameters = ...) -> Self: ... - def executemany(self: Self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... + def execute(self, __sql: str, __parameters: _Parameters = ...) -> Self: ... + def executemany(self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... def executescript(self, __sql_script: str) -> Cursor: ... def fetchall(self) -> list[Any]: ... - def fetchmany(self, size: int | None = ...) -> list[Any]: ... + def fetchmany(self, size: int | None = 1) -> list[Any]: ... # Returns either a row (as created by the row_factory) or None, but # putting None in the return annotation causes annoying false positives. def fetchone(self) -> Any: ... - def setinputsizes(self, __sizes: object) -> None: ... # does nothing - def setoutputsize(self, __size: object, __column: object = ...) -> None: ... # does nothing - def __iter__(self: Self) -> Self: ... + def setinputsizes(self, __sizes: Unused) -> None: ... # does nothing + def setoutputsize(self, __size: Unused, __column: Unused = None) -> None: ... # does nothing + def __iter__(self) -> Self: ... def __next__(self) -> Any: ... class DataError(DatabaseError): ... @@ -437,7 +437,7 @@ if sys.version_info >= (3, 8): else: @final class Statement: - def __init__(self, *args, **kwargs): ... + def __init__(self, *args: Incomplete, **kwargs: Incomplete) -> None: ... _Statement: TypeAlias = Statement class Warning(Exception): ... @@ -446,13 +446,13 @@ if sys.version_info >= (3, 11): @final class Blob: def close(self) -> None: ... - def read(self, __length: int = ...) -> bytes: ... - def write(self, __data: bytes) -> None: ... + def read(self, __length: int = -1) -> bytes: ... + def write(self, __data: ReadableBuffer) -> None: ... def tell(self) -> int: ... # whence must be one of os.SEEK_SET, os.SEEK_CUR, os.SEEK_END - def seek(self, __offset: int, __origin: int = ...) -> None: ... + def seek(self, __offset: int, __origin: int = 0) -> None: ... def __len__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, __typ: object, __val: object, __tb: object) -> Literal[False]: ... def __getitem__(self, __item: SupportsIndex | slice) -> int: ... def __setitem__(self, __item: SupportsIndex | slice, __value: int) -> None: ... diff --git a/mypy/typeshed/stdlib/sre_compile.pyi b/mypy/typeshed/stdlib/sre_compile.pyi index a9f4d577d5d1..2d04a886c931 100644 --- a/mypy/typeshed/stdlib/sre_compile.pyi +++ b/mypy/typeshed/stdlib/sre_compile.pyi @@ -8,4 +8,4 @@ MAXCODE: int def dis(code: list[_NamedIntConstant]) -> None: ... def isstring(obj: Any) -> bool: ... 
-def compile(p: str | bytes | SubPattern, flags: int = ...) -> Pattern[Any]: ... +def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ... diff --git a/mypy/typeshed/stdlib/sre_constants.pyi b/mypy/typeshed/stdlib/sre_constants.pyi index e7344fae3798..d522372c438c 100644 --- a/mypy/typeshed/stdlib/sre_constants.pyi +++ b/mypy/typeshed/stdlib/sre_constants.pyi @@ -1,6 +1,6 @@ import sys -from _typeshed import Self from typing import Any +from typing_extensions import Self MAXGROUPS: int @@ -12,11 +12,11 @@ class error(Exception): pos: int | None lineno: int colno: int - def __init__(self, msg: str, pattern: str | bytes | None = ..., pos: int | None = ...) -> None: ... + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... class _NamedIntConstant(int): name: Any - def __new__(cls: type[Self], value: int, name: str) -> Self: ... + def __new__(cls, value: int, name: str) -> Self: ... MAXREPEAT: _NamedIntConstant OPCODES: list[_NamedIntConstant] @@ -79,6 +79,10 @@ REPEAT: _NamedIntConstant REPEAT_ONE: _NamedIntConstant SUBPATTERN: _NamedIntConstant MIN_REPEAT_ONE: _NamedIntConstant +if sys.version_info >= (3, 11): + ATOMIC_GROUP: _NamedIntConstant + POSSESSIVE_REPEAT: _NamedIntConstant + POSSESSIVE_REPEAT_ONE: _NamedIntConstant RANGE_UNI_IGNORE: _NamedIntConstant GROUPREF_LOC_IGNORE: _NamedIntConstant GROUPREF_UNI_IGNORE: _NamedIntConstant diff --git a/mypy/typeshed/stdlib/sre_parse.pyi b/mypy/typeshed/stdlib/sre_parse.pyi index e4d66d1baf52..56f10bb41d57 100644 --- a/mypy/typeshed/stdlib/sre_parse.pyi +++ b/mypy/typeshed/stdlib/sre_parse.pyi @@ -27,7 +27,6 @@ class _State: groupdict: dict[str, int] groupwidths: list[int | None] lookbehindgroups: int | None - def __init__(self) -> None: ... @property def groups(self) -> int: ... def opengroup(self, name: str | None = ...) -> int: ... @@ -53,12 +52,12 @@ class SubPattern: if sys.version_info >= (3, 8): state: State - def __init__(self, state: State, data: list[_CodeType] | None = ...) -> None: ... + def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ... else: pattern: Pattern - def __init__(self, pattern: Pattern, data: list[_CodeType] | None = ...) -> None: ... + def __init__(self, pattern: Pattern, data: list[_CodeType] | None = None) -> None: ... - def dump(self, level: int = ...) -> None: ... + def dump(self, level: int = 0) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: int | slice) -> None: ... def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ... @@ -86,7 +85,7 @@ class Tokenizer: def pos(self) -> int: ... def tell(self) -> int: ... def seek(self, index: int) -> None: ... - def error(self, msg: str, offset: int = ...) -> _Error: ... + def error(self, msg: str, offset: int = 0) -> _Error: ... if sys.version_info >= (3, 11): def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... @@ -96,14 +95,14 @@ def fix_flags(src: str | bytes, flags: int) -> int: ... _TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] _TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] if sys.version_info >= (3, 8): - def parse(str: str, flags: int = ..., state: State | None = ...) -> SubPattern: ... + def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... @overload def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... @overload def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... 
else: - def parse(str: str, flags: int = ..., pattern: Pattern | None = ...) -> SubPattern: ... + def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... @overload def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... @overload diff --git a/mypy/typeshed/stdlib/ssl.pyi b/mypy/typeshed/stdlib/ssl.pyi index 09c8d07780a7..bbf8a4c6d65a 100644 --- a/mypy/typeshed/stdlib/ssl.pyi +++ b/mypy/typeshed/stdlib/ssl.pyi @@ -1,17 +1,17 @@ import enum import socket import sys -from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer from collections.abc import Callable, Iterable -from typing import Any, NamedTuple, Union, overload -from typing_extensions import Literal, TypeAlias, TypedDict, final +from typing import Any, NamedTuple, overload +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final _PCTRTT: TypeAlias = tuple[tuple[str, str], ...] _PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] _PeerCertRetDictType: TypeAlias = dict[str, str | _PCTRTTT | _PCTRTT] _PeerCertRetType: TypeAlias = _PeerCertRetDictType | bytes | None _EnumRetType: TypeAlias = list[tuple[bytes, str, set[str] | bool]] -_PasswordType: TypeAlias = Union[Callable[[], str | bytes], str, bytes] +_PasswordType: TypeAlias = Callable[[], str | bytes | bytearray] | str | bytes | bytearray _SrvnmeCbType: TypeAlias = Callable[[SSLSocket | SSLObject, str | None, SSLSocket], int | None] @@ -46,55 +46,74 @@ CertificateError = SSLCertVerificationError def wrap_socket( sock: socket.socket, - keyfile: StrOrBytesPath | None = ..., - certfile: StrOrBytesPath | None = ..., - server_side: bool = ..., + keyfile: StrOrBytesPath | None = None, + certfile: StrOrBytesPath | None = None, + server_side: bool = False, cert_reqs: int = ..., ssl_version: int = ..., - ca_certs: str | None = ..., - do_handshake_on_connect: bool = ..., - suppress_ragged_eofs: bool = ..., - ciphers: str | None = ..., + ca_certs: str | None = None, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + ciphers: str | None = None, ) -> SSLSocket: ... def create_default_context( purpose: Purpose = ..., *, - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., -) -> SSLContext: ... -def _create_unverified_context( - protocol: int = ..., - *, - cert_reqs: int = ..., - check_hostname: bool = ..., - purpose: Purpose = ..., - certfile: StrOrBytesPath | None = ..., - keyfile: StrOrBytesPath | None = ..., - cafile: StrOrBytesPath | None = ..., - capath: StrOrBytesPath | None = ..., - cadata: str | bytes | None = ..., + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> SSLContext: ... +if sys.version_info >= (3, 10): + def _create_unverified_context( + protocol: int | None = None, + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... 
+ +else: + def _create_unverified_context( + protocol: int = ..., + *, + cert_reqs: int = ..., + check_hostname: bool = False, + purpose: Purpose = ..., + certfile: StrOrBytesPath | None = None, + keyfile: StrOrBytesPath | None = None, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, + ) -> SSLContext: ... + _create_default_https_context: Callable[..., SSLContext] def RAND_bytes(__num: int) -> bytes: ... def RAND_pseudo_bytes(__num: int) -> tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... -def RAND_add(__s: bytes, __entropy: float) -> None: ... -def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... +def RAND_add(__string: str | ReadableBuffer, __entropy: float) -> None: ... + +if sys.version_info < (3, 12): + def match_hostname(cert: _PeerCertRetDictType, hostname: str) -> None: ... + def cert_time_to_seconds(cert_time: str) -> int: ... if sys.version_info >= (3, 10): def get_server_certificate( - addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ..., timeout: float = ... + addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None, timeout: float = ... ) -> str: ... else: - def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = None) -> str: ... -def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... +def DER_cert_to_PEM_cert(der_cert_bytes: ReadableBuffer) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... class DefaultVerifyPaths(NamedTuple): @@ -177,6 +196,8 @@ class Options(enum.IntFlag): OP_NO_RENEGOTIATION: int if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: int + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: int OP_ALL: Options OP_NO_SSLv2: Options @@ -193,6 +214,8 @@ OP_NO_TICKET: Options OP_NO_RENEGOTIATION: Options if sys.version_info >= (3, 8): OP_ENABLE_MIDDLEBOX_COMPAT: Options + if sys.platform == "linux": + OP_IGNORE_UNEXPECTED_EOF: Options HAS_NEVER_CHECK_COMMON_NAME: bool HAS_SSLv2: bool @@ -274,9 +297,9 @@ class _ASN1Object(NamedTuple): longname: str oid: str @classmethod - def fromnid(cls: type[Self], nid: int) -> Self: ... + def fromnid(cls, nid: int) -> Self: ... @classmethod - def fromname(cls: type[Self], name: str) -> Self: ... + def fromname(cls, name: str) -> Self: ... class Purpose(_ASN1Object, enum.Enum): SERVER_AUTH: _ASN1Object @@ -290,26 +313,26 @@ class SSLSocket(socket.socket): @property def session_reused(self) -> bool | None: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def connect(self, addr: socket._Address | bytes) -> None: ... - def connect_ex(self, addr: socket._Address | bytes) -> int: ... - def recv(self, buflen: int = ..., flags: int = ...) -> bytes: ... - def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ...) -> int: ... - def recvfrom(self, buflen: int = ..., flags: int = ...) -> tuple[bytes, socket._RetAddress]: ... + def connect(self, addr: socket._Address) -> None: ... + def connect_ex(self, addr: socket._Address) -> int: ... + def recv(self, buflen: int = 1024, flags: int = 0) -> bytes: ... + def recv_into(self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0) -> int: ... + def recvfrom(self, buflen: int = 1024, flags: int = 0) -> tuple[bytes, socket._RetAddress]: ... 
def recvfrom_into( - self, buffer: WriteableBuffer, nbytes: int | None = ..., flags: int = ... + self, buffer: WriteableBuffer, nbytes: int | None = None, flags: int = 0 ) -> tuple[int, socket._RetAddress]: ... - def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... - def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... + def send(self, data: ReadableBuffer, flags: int = 0) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = 0) -> None: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: socket._Address, addr: None = None) -> int: ... @overload - def sendto(self, data: ReadableBuffer, flags_or_addr: int | socket._Address, addr: socket._Address | None = ...) -> int: ... + def sendto(self, data: ReadableBuffer, flags_or_addr: int, addr: socket._Address) -> int: ... def shutdown(self, how: int) -> None: ... - def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... - def do_handshake(self, block: bool = ...) -> None: ... # block is undocumented + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... + def do_handshake(self, block: bool = False) -> None: ... # block is undocumented @overload - def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload @@ -317,7 +340,7 @@ class SSLSocket(socket.socket): def cipher(self) -> tuple[str, str, int] | None: ... def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... def compression(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = ...) -> bytes | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... def selected_alpn_protocol(self) -> str | None: ... def selected_npn_protocol(self) -> str | None: ... def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... @@ -355,22 +378,32 @@ class SSLContext: if sys.version_info >= (3, 8): keylog_filename: str post_handshake_auth: bool - def __new__(cls: type[Self], protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... - def __init__(self, protocol: int = ...) -> None: ... + if sys.version_info >= (3, 10): + security_level: int + if sys.version_info >= (3, 10): + # Using the default (None) for the `protocol` parameter is deprecated, + # but there isn't a good way of marking that in the stub unless/until PEP 702 is accepted + def __new__(cls, protocol: int | None = None, *args: Any, **kwargs: Any) -> Self: ... + else: + def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... + def cert_store_stats(self) -> dict[str, int]: ... def load_cert_chain( - self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = ..., password: _PasswordType | None = ... + self, certfile: StrOrBytesPath, keyfile: StrOrBytesPath | None = None, password: _PasswordType | None = None ) -> None: ... def load_default_certs(self, purpose: Purpose = ...) -> None: ... def load_verify_locations( - self, cafile: StrOrBytesPath | None = ..., capath: StrOrBytesPath | None = ..., cadata: str | bytes | None = ... 
+ self, + cafile: StrOrBytesPath | None = None, + capath: StrOrBytesPath | None = None, + cadata: str | ReadableBuffer | None = None, ) -> None: ... @overload - def get_ca_certs(self, binary_form: Literal[False] = ...) -> list[_PeerCertRetDictType]: ... + def get_ca_certs(self, binary_form: Literal[False] = False) -> list[_PeerCertRetDictType]: ... @overload def get_ca_certs(self, binary_form: Literal[True]) -> list[bytes]: ... @overload - def get_ca_certs(self, binary_form: bool = ...) -> Any: ... + def get_ca_certs(self, binary_form: bool = False) -> Any: ... def get_ciphers(self) -> list[_Cipher]: ... def set_default_verify_paths(self) -> None: ... def set_ciphers(self, __cipherlist: str) -> None: ... @@ -382,19 +415,19 @@ class SSLContext: def wrap_socket( self, sock: socket.socket, - server_side: bool = ..., - do_handshake_on_connect: bool = ..., - suppress_ragged_eofs: bool = ..., - server_hostname: str | None = ..., - session: SSLSession | None = ..., + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: SSLSession | None = None, ) -> SSLSocket: ... def wrap_bio( self, incoming: MemoryBIO, outgoing: MemoryBIO, - server_side: bool = ..., - server_hostname: str | None = ..., - session: SSLSession | None = ..., + server_side: bool = False, + server_hostname: str | None = None, + session: SSLSession | None = None, ) -> SSLObject: ... def session_stats(self) -> dict[str, int]: ... @@ -408,10 +441,10 @@ class SSLObject: @property def session_reused(self) -> bool: ... def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... - def write(self, data: bytes) -> int: ... + def read(self, len: int = 1024, buffer: bytearray | None = None) -> bytes: ... + def write(self, data: ReadableBuffer) -> int: ... @overload - def getpeercert(self, binary_form: Literal[False] = ...) -> _PeerCertRetDictType | None: ... + def getpeercert(self, binary_form: Literal[False] = False) -> _PeerCertRetDictType | None: ... @overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload @@ -425,7 +458,7 @@ class SSLObject: def do_handshake(self) -> None: ... def unwrap(self) -> None: ... def version(self) -> str | None: ... - def get_channel_binding(self, cb_type: str = ...) -> bytes | None: ... + def get_channel_binding(self, cb_type: str = "tls-unique") -> bytes | None: ... if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... @@ -433,17 +466,22 @@ class SSLObject: class MemoryBIO: pending: int eof: bool - def read(self, __size: int = ...) -> bytes: ... - def write(self, __buf: bytes) -> int: ... + def read(self, __size: int = -1) -> bytes: ... + def write(self, __buf: ReadableBuffer) -> int: ... def write_eof(self) -> None: ... @final class SSLSession: - id: bytes - time: int - timeout: int - ticket_lifetime_hint: int - has_ticket: bool + @property + def has_ticket(self) -> bool: ... + @property + def id(self) -> bytes: ... + @property + def ticket_lifetime_hint(self) -> int: ... + @property + def time(self) -> int: ... + @property + def timeout(self) -> int: ... 
class SSLErrorNumber(enum.IntEnum): SSL_ERROR_EOF: int diff --git a/mypy/typeshed/stdlib/statistics.pyi b/mypy/typeshed/stdlib/statistics.pyi index a01665ad8227..1358b1f90d7d 100644 --- a/mypy/typeshed/stdlib/statistics.pyi +++ b/mypy/typeshed/stdlib/statistics.pyi @@ -1,10 +1,10 @@ import sys -from _typeshed import Self, SupportsRichComparisonT +from _typeshed import SupportsRichComparisonT from collections.abc import Hashable, Iterable, Sequence from decimal import Decimal from fractions import Fraction from typing import Any, NamedTuple, SupportsFloat, TypeVar -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "StatisticsError", @@ -37,7 +37,7 @@ _HashableT = TypeVar("_HashableT", bound=Hashable) class StatisticsError(ValueError): ... if sys.version_info >= (3, 11): - def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = ...) -> float: ... + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = None) -> float: ... elif sys.version_info >= (3, 8): def fmean(data: Iterable[SupportsFloat]) -> float: ... @@ -48,7 +48,7 @@ if sys.version_info >= (3, 8): def mean(data: Iterable[_NumberT]) -> _NumberT: ... if sys.version_info >= (3, 10): - def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = ...) -> _NumberT: ... + def harmonic_mean(data: Iterable[_NumberT], weights: Iterable[_Number] | None = None) -> _NumberT: ... else: def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... @@ -58,30 +58,30 @@ def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichCompariso def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... if sys.version_info >= (3, 11): - def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = ...) -> float: ... + def median_grouped(data: Iterable[SupportsFloat], interval: SupportsFloat = 1.0) -> float: ... else: - def median_grouped(data: Iterable[_NumberT], interval: _NumberT = ...) -> _NumberT | float: ... + def median_grouped(data: Iterable[_NumberT], interval: _NumberT | float = 1) -> _NumberT | float: ... def mode(data: Iterable[_HashableT]) -> _HashableT: ... if sys.version_info >= (3, 8): def multimode(data: Iterable[_HashableT]) -> list[_HashableT]: ... -def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = ...) -> _NumberT: ... -def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = ...) -> _NumberT: ... +def pstdev(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... +def pvariance(data: Iterable[_NumberT], mu: _NumberT | None = None) -> _NumberT: ... if sys.version_info >= (3, 8): def quantiles( - data: Iterable[_NumberT], *, n: int = ..., method: Literal["inclusive", "exclusive"] = ... + data: Iterable[_NumberT], *, n: int = 4, method: Literal["inclusive", "exclusive"] = "exclusive" ) -> list[_NumberT]: ... -def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = ...) -> _NumberT: ... -def variance(data: Iterable[_NumberT], xbar: _NumberT | None = ...) -> _NumberT: ... +def stdev(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... +def variance(data: Iterable[_NumberT], xbar: _NumberT | None = None) -> _NumberT: ... if sys.version_info >= (3, 8): class NormalDist: - def __init__(self, mu: float = ..., sigma: float = ...) -> None: ... + def __init__(self, mu: float = 0.0, sigma: float = 1.0) -> None: ... @property def mean(self) -> float: ... 
@property @@ -93,13 +93,13 @@ if sys.version_info >= (3, 8): @property def variance(self) -> float: ... @classmethod - def from_samples(cls: type[Self], data: Iterable[SupportsFloat]) -> Self: ... - def samples(self, n: int, *, seed: Any | None = ...) -> list[float]: ... + def from_samples(cls, data: Iterable[SupportsFloat]) -> Self: ... + def samples(self, n: int, *, seed: Any | None = None) -> list[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... def inv_cdf(self, p: float) -> float: ... def overlap(self, other: NormalDist) -> float: ... - def quantiles(self, n: int = ...) -> list[float]: ... + def quantiles(self, n: int = 4) -> list[float]: ... if sys.version_info >= (3, 9): def zscore(self, x: float) -> float: ... @@ -124,7 +124,7 @@ if sys.version_info >= (3, 10): if sys.version_info >= (3, 11): def linear_regression( - __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = ... + __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = False ) -> LinearRegression: ... elif sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index 1b9ba5b58fa1..dc9a449e0e39 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -2,8 +2,8 @@ import sys from _typeshed import StrOrLiteralStr from collections.abc import Iterable, Mapping, Sequence from re import Pattern, RegexFlag -from typing import Any, overload -from typing_extensions import LiteralString +from typing import Any, ClassVar, overload +from typing_extensions import LiteralString, TypeAlias __all__ = [ "ascii_letters", @@ -30,15 +30,22 @@ punctuation: LiteralString printable: LiteralString whitespace: LiteralString -def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = ...) -> StrOrLiteralStr: ... +def capwords(s: StrOrLiteralStr, sep: StrOrLiteralStr | None = None) -> StrOrLiteralStr: ... -class Template: +if sys.version_info >= (3, 9): + _TemplateMetaclass: TypeAlias = type +else: + class _TemplateMetaclass(type): + pattern: ClassVar[str] + def __init__(cls, name: str, bases: tuple[type, ...], dct: dict[str, Any]) -> None: ... + +class Template(metaclass=_TemplateMetaclass): template: str - delimiter: str - idpattern: str - braceidpattern: str | None - flags: RegexFlag - pattern: Pattern[str] + delimiter: ClassVar[str] + idpattern: ClassVar[str] + braceidpattern: ClassVar[str | None] + flags: ClassVar[RegexFlag] + pattern: ClassVar[Pattern[str]] def __init__(self, template: str) -> None: ... def substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... def safe_substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... @@ -57,11 +64,20 @@ class Formatter: ) -> LiteralString: ... @overload def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... + def _vformat( # undocumented + self, + format_string: str, + args: Sequence[Any], + kwargs: Mapping[str, Any], + used_args: set[int | str], + recursion_depth: int, + auto_arg_index: int = 0, + ) -> tuple[str, int]: ... def parse( self, format_string: StrOrLiteralStr ) -> Iterable[tuple[StrOrLiteralStr, StrOrLiteralStr | None, StrOrLiteralStr | None, StrOrLiteralStr | None]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... 
- def check_unused_args(self, used_args: Sequence[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... + def check_unused_args(self, used_args: set[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... def format_field(self, value: Any, format_spec: str) -> Any: ... def convert_field(self, value: Any, conversion: str) -> Any: ... diff --git a/mypy/typeshed/stdlib/struct.pyi b/mypy/typeshed/stdlib/struct.pyi index f7eff2b76f14..4220cd825b76 100644 --- a/mypy/typeshed/stdlib/struct.pyi +++ b/mypy/typeshed/stdlib/struct.pyi @@ -6,19 +6,21 @@ __all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpac class error(Exception): ... -def pack(fmt: str | bytes, *v: Any) -> bytes: ... -def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... +def pack(__fmt: str | bytes, *v: Any) -> bytes: ... +def pack_into(__fmt: str | bytes, __buffer: WriteableBuffer, __offset: int, *v: Any) -> None: ... def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... -def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... +def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... def calcsize(__format: str | bytes) -> int: ... class Struct: - format: str - size: int + @property + def format(self) -> str: ... + @property + def size(self) -> int: ... def __init__(self, format: str | bytes) -> None: ... def pack(self, *v: Any) -> bytes: ... def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... def unpack(self, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... - def unpack_from(self, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ... def iter_unpack(self, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index fded3f74928e..3940fad7b915 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self, StrOrBytesPath -from collections.abc import Callable, Iterable, Mapping, Sequence +from _typeshed import ReadableBuffer, StrOrBytesPath +from collections.abc import Callable, Collection, Iterable, Mapping, Sequence from types import TracebackType from typing import IO, Any, AnyStr, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -63,13 +63,13 @@ if sys.platform == "win32": # except TimeoutError as e: # reveal_type(e.cmd) # Any, but morally is _CMD _FILE: TypeAlias = None | int | IO[Any] -_TXT: TypeAlias = bytes | str +_InputString: TypeAlias = ReadableBuffer | str if sys.version_info >= (3, 8): _CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath] else: # Python 3.7 doesn't support _CMD being a single PathLike. # See: https://bugs.python.org/issue31961 - _CMD: TypeAlias = _TXT | Sequence[StrOrBytesPath] + _CMD: TypeAlias = str | bytes | Sequence[StrOrBytesPath] if sys.platform == "win32": _ENV: TypeAlias = Mapping[str, str] else: @@ -91,14 +91,7 @@ class CompletedProcess(Generic[_T]): # and writing all the overloads would be horrific. 
     stdout: _T
     stderr: _T
-    # pyright ignore on __init__ because the TypeVar can technically be unsolved, but see comment above
-    def __init__(
-        self,
-        args: _CMD,
-        returncode: int,
-        stdout: _T | None = ...,  # pyright: ignore[reportInvalidTypeVarUse]
-        stderr: _T | None = ...,  # pyright: ignore[reportInvalidTypeVarUse]
-    ) -> None: ...
+    def __init__(self, args: _CMD, returncode: int, stdout: _T | None = None, stderr: _T | None = None) -> None: ...
     def check_returncode(self) -> None: ...
     if sys.version_info >= (3, 9):
         def __class_getitem__(cls, item: Any) -> GenericAlias: ...
@@ -118,20 +111,20 @@ if sys.version_info >= (3, 11):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str | None = ...,
         errors: str | None = ...,
-        input: str | None = ...,
+        input: str | None = None,
         text: Literal[True],
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -152,20 +145,20 @@ if sys.version_info >= (3, 11):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str,
         errors: str | None = ...,
-        input: str | None = ...,
+        input: str | None = None,
         text: bool | None = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -186,20 +179,20 @@ if sys.version_info >= (3, 11):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str | None = ...,
         errors: str,
-        input: str | None = ...,
+        input: str | None = None,
         text: bool | None = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -226,15 +219,15 @@ if sys.version_info >= (3, 11):
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         # where the *real* keyword only args start
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str | None = ...,
         errors: str | None = ...,
-        input: str | None = ...,
+        input: str | None = None,
         text: bool | None = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -255,20 +248,20 @@ if sys.version_info >= (3, 11):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: Literal[False] = ...,
+        universal_newlines: Literal[False, None] = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
-        encoding: None = ...,
-        errors: None = ...,
-        input: bytes | None = ...,
+        capture_output: bool = False,
+        check: bool = False,
+        encoding: None = None,
+        errors: None = None,
+        input: ReadableBuffer | None = None,
         text: Literal[None, False] = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -289,20 +282,20 @@ if sys.version_info >= (3, 11):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str | None = ...,
         errors: str | None = ...,
-        input: _TXT | None = ...,
+        input: _InputString | None = None,
         text: bool | None = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -326,20 +319,20 @@ elif sys.version_info >= (3, 10):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str | None = ...,
         errors: str | None = ...,
-        input: str | None = ...,
+        input: str | None = None,
         text: Literal[True],
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -359,20 +352,20 @@ elif sys.version_info >= (3, 10):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
         start_new_session: bool = ...,
-        pass_fds: Any = ...,
+        pass_fds: Collection[int] = ...,
         *,
-        capture_output: bool = ...,
-        check: bool = ...,
+        capture_output: bool = False,
+        check: bool = False,
         encoding: str,
         errors: str | None = ...,
-        input: str | None = ...,
+        input: str | None = None,
         text: bool | None = ...,
-        timeout: float | None = ...,
+        timeout: float | None = None,
         user: str | int | None = ...,
         group: str | int | None = ...,
         extra_groups: Iterable[str | int] | None = ...,
@@ -392,20 +385,20 @@ elif sys.version_info >= (3, 10):
         shell: bool = ...,
         cwd: StrOrBytesPath | None = ...,
         env: _ENV | None = ...,
-        universal_newlines: bool = ...,
+        universal_newlines: bool | None = ...,
         startupinfo: Any = ...,
         creationflags: int = ...,
         restore_signals: bool = ...,
start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -431,15 +424,15 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -459,20 +452,20 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., - encoding: None = ..., - errors: None = ..., - input: bytes | None = ..., + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -492,20 +485,20 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -528,20 +521,20 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = 
..., extra_groups: Iterable[str | int] | None = ..., @@ -560,20 +553,20 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -592,20 +585,20 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -630,15 +623,15 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -657,20 +650,20 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., - encoding: None = ..., - errors: None = ..., - input: bytes | None = ..., + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -689,20 +682,20 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool 
= ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, user: str | int | None = ..., group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., @@ -723,20 +716,20 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: Literal[True], - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -751,20 +744,20 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str, errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -779,20 +772,20 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str, - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... @overload def run( @@ -813,15 +806,15 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: str | None = ..., + input: str | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[str]: ... 
@overload def run( @@ -836,20 +829,20 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., - encoding: None = ..., - errors: None = ..., - input: bytes | None = ..., + capture_output: bool = False, + check: bool = False, + encoding: None = None, + errors: None = None, + input: ReadableBuffer | None = None, text: Literal[None, False] = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[bytes]: ... @overload def run( @@ -864,20 +857,20 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - capture_output: bool = ..., - check: bool = ..., + capture_output: bool = False, + check: bool = False, encoding: str | None = ..., errors: str | None = ..., - input: _TXT | None = ..., + input: _InputString | None = None, text: bool | None = ..., - timeout: float | None = ..., + timeout: float | None = None, ) -> CompletedProcess[Any]: ... # Same args as Popen.__init__ @@ -895,14 +888,14 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -926,14 +919,14 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -956,14 +949,14 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -984,14 +977,14 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., + timeout: float | None = None, text: bool | None = ..., ) 
-> int: ... @@ -1001,7 +994,7 @@ if sys.version_info >= (3, 11): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1010,12 +1003,12 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1032,7 +1025,7 @@ elif sys.version_info >= (3, 10): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1041,12 +1034,12 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1062,7 +1055,7 @@ elif sys.version_info >= (3, 9): def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1071,12 +1064,12 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1090,7 +1083,7 @@ else: def check_call( args: _CMD, bufsize: int = ..., - executable: StrOrBytesPath = ..., + executable: StrOrBytesPath | None = None, stdin: _FILE = ..., stdout: _FILE = ..., stderr: _FILE = ..., @@ -1099,12 +1092,12 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., timeout: float | None = ..., *, text: bool | None = ..., @@ -1124,15 +1117,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1155,15 +1148,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., 
start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1186,15 +1179,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1223,10 +1216,10 @@ if sys.version_info >= (3, 11): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1249,17 +1242,17 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., - encoding: None = ..., - errors: None = ..., + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1280,15 +1273,15 @@ if sys.version_info >= (3, 11): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1298,7 +1291,7 @@ if sys.version_info >= (3, 11): umask: int = ..., pipesize: int = ..., process_group: int | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... 
# morally: -> str | bytes elif sys.version_info >= (3, 10): # 3.10 adds "pipesize" argument @@ -1314,15 +1307,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1344,15 +1337,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1374,15 +1367,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1410,10 +1403,10 @@ elif sys.version_info >= (3, 10): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1435,17 +1428,17 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., - encoding: None = ..., - errors: None = ..., + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1465,15 +1458,15 @@ elif sys.version_info >= (3, 10): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., 
text: bool | None = ..., @@ -1482,7 +1475,7 @@ elif sys.version_info >= (3, 10): extra_groups: Iterable[str | int] | None = ..., umask: int = ..., pipesize: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes elif sys.version_info >= (3, 9): # 3.9 adds arguments "user", "group", "extra_groups" and "umask" @@ -1498,15 +1491,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1527,15 +1520,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1556,15 +1549,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1591,10 +1584,10 @@ elif sys.version_info >= (3, 9): creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1615,17 +1608,17 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., - encoding: None = ..., - errors: None = ..., + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., user: str | int | None = ..., group: str | int | None = ..., @@ -1644,15 +1637,15 @@ elif sys.version_info >= (3, 9): shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - 
pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1660,7 +1653,7 @@ elif sys.version_info >= (3, 9): group: str | int | None = ..., extra_groups: Iterable[str | int] | None = ..., umask: int = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes else: @overload @@ -1675,15 +1668,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: Literal[True], @@ -1700,15 +1693,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str, errors: str | None = ..., text: bool | None = ..., @@ -1725,15 +1718,15 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str, text: bool | None = ..., @@ -1756,10 +1749,10 @@ else: creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., # where the real keyword only ones start - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., @@ -1776,17 +1769,17 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., + universal_newlines: Literal[False, None] = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., - encoding: None = ..., - errors: None = ..., + timeout: float | None = None, + input: _InputString | None = ..., + encoding: None = None, + errors: None = None, text: Literal[None, False] = ..., ) -> bytes: ... 
@overload @@ -1801,19 +1794,19 @@ else: shell: bool = ..., cwd: StrOrBytesPath | None = ..., env: _ENV | None = ..., - universal_newlines: bool = ..., + universal_newlines: bool | None = ..., startupinfo: Any = ..., creationflags: int = ..., restore_signals: bool = ..., start_new_session: bool = ..., - pass_fds: Any = ..., + pass_fds: Collection[int] = ..., *, - timeout: float | None = ..., - input: _TXT | None = ..., + timeout: float | None = None, + input: _InputString | None = ..., encoding: str | None = ..., errors: str | None = ..., text: bool | None = ..., - ) -> Any: ... # morally: -> _TXT + ) -> Any: ... # morally: -> str | bytes PIPE: int STDOUT: int @@ -1822,26 +1815,30 @@ DEVNULL: int class SubprocessError(Exception): ... class TimeoutExpired(SubprocessError): - def __init__(self, cmd: _CMD, timeout: float, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__( + self, cmd: _CMD, timeout: float, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... # morally: _CMD cmd: Any timeout: float - # morally: _TXT | None + # morally: str | bytes | None output: Any - stdout: Any - stderr: Any + stdout: bytes | None + stderr: bytes | None class CalledProcessError(SubprocessError): returncode: int # morally: _CMD cmd: Any - # morally: _TXT | None + # morally: str | bytes | None output: Any - # morally: _TXT | None + # morally: str | bytes | None stdout: Any stderr: Any - def __init__(self, returncode: int, cmd: _CMD, output: _TXT | None = ..., stderr: _TXT | None = ...) -> None: ... + def __init__( + self, returncode: int, cmd: _CMD, output: str | bytes | None = None, stderr: str | bytes | None = None + ) -> None: ... class Popen(Generic[AnyStr]): args: _CMD @@ -1858,188 +1855,188 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... 
         @overload
         def __init__(
             self: Popen[str],
             args: _CMD,
-            bufsize: int = ...,
-            executable: StrOrBytesPath | None = ...,
-            stdin: _FILE | None = ...,
-            stdout: _FILE | None = ...,
-            stderr: _FILE | None = ...,
-            preexec_fn: Callable[[], Any] | None = ...,
-            close_fds: bool = ...,
-            shell: bool = ...,
-            cwd: StrOrBytesPath | None = ...,
-            env: _ENV | None = ...,
-            universal_newlines: bool = ...,
-            startupinfo: Any | None = ...,
-            creationflags: int = ...,
-            restore_signals: bool = ...,
-            start_new_session: bool = ...,
-            pass_fds: Any = ...,
+            bufsize: int = -1,
+            executable: StrOrBytesPath | None = None,
+            stdin: _FILE | None = None,
+            stdout: _FILE | None = None,
+            stderr: _FILE | None = None,
+            preexec_fn: Callable[[], Any] | None = None,
+            close_fds: bool = True,
+            shell: bool = False,
+            cwd: StrOrBytesPath | None = None,
+            env: _ENV | None = None,
+            universal_newlines: bool | None = None,
+            startupinfo: Any | None = None,
+            creationflags: int = 0,
+            restore_signals: bool = True,
+            start_new_session: bool = False,
+            pass_fds: Collection[int] = ...,
             *,
-            text: bool | None = ...,
-            encoding: str | None = ...,
+            text: bool | None = None,
+            encoding: str | None = None,
             errors: str,
-            user: str | int | None = ...,
-            group: str | int | None = ...,
-            extra_groups: Iterable[str | int] | None = ...,
-            umask: int = ...,
-            pipesize: int = ...,
-            process_group: int | None = ...,
+            user: str | int | None = None,
+            group: str | int | None = None,
+            extra_groups: Iterable[str | int] | None = None,
+            umask: int = -1,
+            pipesize: int = -1,
+            process_group: int | None = None,
         ) -> None: ...
         @overload
         def __init__(
             self: Popen[str],
             args: _CMD,
-            bufsize: int = ...,
-            executable: StrOrBytesPath | None = ...,
-            stdin: _FILE | None = ...,
-            stdout: _FILE | None = ...,
-            stderr: _FILE | None = ...,
-            preexec_fn: Callable[[], Any] | None = ...,
-            close_fds: bool = ...,
-            shell: bool = ...,
-            cwd: StrOrBytesPath | None = ...,
-            env: _ENV | None = ...,
+            bufsize: int = -1,
+            executable: StrOrBytesPath | None = None,
+            stdin: _FILE | None = None,
+            stdout: _FILE | None = None,
+            stderr: _FILE | None = None,
+            preexec_fn: Callable[[], Any] | None = None,
+            close_fds: bool = True,
+            shell: bool = False,
+            cwd: StrOrBytesPath | None = None,
+            env: _ENV | None = None,
             *,
             universal_newlines: Literal[True],
-            startupinfo: Any | None = ...,
-            creationflags: int = ...,
-            restore_signals: bool = ...,
-            start_new_session: bool = ...,
-            pass_fds: Any = ...,
+            startupinfo: Any | None = None,
+            creationflags: int = 0,
+            restore_signals: bool = True,
+            start_new_session: bool = False,
+            pass_fds: Collection[int] = ...,
             # where the *real* keyword only args start
-            text: bool | None = ...,
-            encoding: str | None = ...,
-            errors: str | None = ...,
-            user: str | int | None = ...,
-            group: str | int | None = ...,
-            extra_groups: Iterable[str | int] | None = ...,
-            umask: int = ...,
-            pipesize: int = ...,
-            process_group: int | None = ...,
+            text: bool | None = None,
+            encoding: str | None = None,
+            errors: str | None = None,
+            user: str | int | None = None,
+            group: str | int | None = None,
+            extra_groups: Iterable[str | int] | None = None,
+            umask: int = -1,
+            pipesize: int = -1,
+            process_group: int | None = None,
         ) -> None: ...
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., - process_group: int | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, + process_group: int | None = None, ) -> None: ... elif sys.version_info >= (3, 10): # pipesize is added in 3.10 @@ -2047,182 +2044,182 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., - pipesize: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, + pipesize: int = -1, ) -> None: ... elif sys.version_info >= (3, 9): # user, group, extra_groups, umask were added in 3.9 @@ -2230,343 +2227,340 @@ class Popen(Generic[AnyStr]): def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., - user: str | int | None = ..., - group: str | int | None = ..., - extra_groups: Iterable[str | int] | None = ..., - umask: int = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, ) -> None: ... else: @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., + text: bool | None = None, encoding: str, - errors: str | None = ..., + errors: str | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., + text: bool | None = None, + encoding: str | None = None, errors: str, ) -> None: ... @overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, *, universal_newlines: Literal[True], - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., # where the *real* keyword only args start - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... 
@overload def __init__( self: Popen[str], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, text: Literal[True], - encoding: str | None = ..., - errors: str | None = ..., + encoding: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self: Popen[bytes], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: Literal[False] = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: Literal[False, None] = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: Literal[None, False] = ..., - encoding: None = ..., - errors: None = ..., + text: Literal[None, False] = None, + encoding: None = None, + errors: None = None, ) -> None: ... 
@overload def __init__( self: Popen[Any], args: _CMD, - bufsize: int = ..., - executable: StrOrBytesPath | None = ..., - stdin: _FILE | None = ..., - stdout: _FILE | None = ..., - stderr: _FILE | None = ..., - preexec_fn: Callable[[], Any] | None = ..., - close_fds: bool = ..., - shell: bool = ..., - cwd: StrOrBytesPath | None = ..., - env: _ENV | None = ..., - universal_newlines: bool = ..., - startupinfo: Any | None = ..., - creationflags: int = ..., - restore_signals: bool = ..., - start_new_session: bool = ..., - pass_fds: Any = ..., + bufsize: int = -1, + executable: StrOrBytesPath | None = None, + stdin: _FILE | None = None, + stdout: _FILE | None = None, + stderr: _FILE | None = None, + preexec_fn: Callable[[], Any] | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: _ENV | None = None, + universal_newlines: bool | None = None, + startupinfo: Any | None = None, + creationflags: int = 0, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Collection[int] = ..., *, - text: bool | None = ..., - encoding: str | None = ..., - errors: str | None = ..., + text: bool | None = None, + encoding: str | None = None, + errors: str | None = None, ) -> None: ... def poll(self) -> int | None: ... - def wait(self, timeout: float | None = ...) -> int: ... - # Return str/bytes - def communicate( - self, - input: AnyStr | None = ..., - timeout: float | None = ..., - # morally this should be optional - ) -> tuple[AnyStr, AnyStr]: ... + def wait(self, timeout: float | None = None) -> int: ... + # morally the members of the returned tuple should be optional + # TODO this should allow ReadableBuffer for Popen[bytes], but adding + # overloads for that runs into a mypy bug (python/mypy#14070). + def communicate(self, input: AnyStr | None = None, timeout: float | None = None) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... @@ -2575,12 +2569,12 @@ class Popen(Generic[AnyStr]): # The result really is always a str. if sys.version_info >= (3, 11): - def getstatusoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> tuple[int, str]: ... - def getoutput(cmd: _TXT, *, encoding: str | None = ..., errors: str | None = ...) -> str: ... + def getstatusoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes, *, encoding: str | None = None, errors: str | None = None) -> str: ... else: - def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... - def getoutput(cmd: _TXT) -> str: ... + def getstatusoutput(cmd: str | bytes) -> tuple[int, str]: ... + def getoutput(cmd: str | bytes) -> str: ... if sys.version_info >= (3, 8): def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... 
# undocumented @@ -2593,12 +2587,12 @@ if sys.platform == "win32": def __init__( self, *, - dwFlags: int = ..., - hStdInput: Any | None = ..., - hStdOutput: Any | None = ..., - hStdError: Any | None = ..., - wShowWindow: int = ..., - lpAttributeList: Mapping[str, Any] | None = ..., + dwFlags: int = 0, + hStdInput: Any | None = None, + hStdOutput: Any | None = None, + hStdError: Any | None = None, + wShowWindow: int = 0, + lpAttributeList: Mapping[str, Any] | None = None, ) -> None: ... dwFlags: int hStdInput: Any | None diff --git a/mypy/typeshed/stdlib/sunau.pyi b/mypy/typeshed/stdlib/sunau.pyi index 5b21cb03d4a3..6109b368c01a 100644 --- a/mypy/typeshed/stdlib/sunau.pyi +++ b/mypy/typeshed/stdlib/sunau.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import Self +from _typeshed import Unused from typing import IO, Any, NamedTuple, NoReturn, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias _File: TypeAlias = str | IO[bytes] @@ -32,8 +32,8 @@ class _sunau_params(NamedTuple): class Au_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> IO[bytes] | None: ... def rewind(self) -> None: ... def close(self) -> None: ... @@ -52,8 +52,8 @@ class Au_read: class Au_write: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... @@ -78,7 +78,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): openfp = open diff --git a/mypy/typeshed/stdlib/symtable.pyi b/mypy/typeshed/stdlib/symtable.pyi index d44b2d7927b3..304ae8bf8126 100644 --- a/mypy/typeshed/stdlib/symtable.pyi +++ b/mypy/typeshed/stdlib/symtable.pyi @@ -38,11 +38,11 @@ class Class(SymbolTable): class Symbol: if sys.version_info >= (3, 8): def __init__( - self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ..., *, module_scope: bool = ... + self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None, *, module_scope: bool = False ) -> None: ... def is_nonlocal(self) -> bool: ... else: - def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ...) -> None: ... + def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = None) -> None: ... def get_name(self) -> str: ... def is_referenced(self) -> bool: ... @@ -59,6 +59,5 @@ class Symbol: def get_namespace(self) -> SymbolTable: ... class SymbolTableFactory: - def __init__(self) -> None: ... def new(self, table: Any, filename: str) -> SymbolTable: ... def __call__(self, table: Any, filename: str) -> SymbolTable: ... 
diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index a1c875561a87..e12881599b4a 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -7,10 +7,12 @@ from importlib.machinery import ModuleSpec from io import TextIOWrapper from types import FrameType, ModuleType, TracebackType from typing import Any, NoReturn, Protocol, TextIO, TypeVar, overload -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Final, Literal, TypeAlias, final _T = TypeVar("_T") +# see https://github.com/python/typeshed/issues/8513#issue-1333671093 for the rationale behind this alias +_ExitCode: TypeAlias = str | int | None _OptExcInfo: TypeAlias = OptExcInfo # noqa: Y047 # TODO: obsolete, remove fall 2022 or later # Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder` @@ -60,9 +62,10 @@ stdout: TextIO stderr: TextIO if sys.version_info >= (3, 10): stdlib_module_names: frozenset[str] -__stdin__: TextIOWrapper -__stdout__: TextIOWrapper -__stderr__: TextIOWrapper + +__stdin__: Final[TextIOWrapper] # Contains the original value of stdin +__stdout__: Final[TextIOWrapper] # Contains the original value of stdout +__stderr__: Final[TextIOWrapper] # Contains the original value of stderr tracebacklimit: int version: str api_version: int @@ -188,11 +191,15 @@ class _implementation: int_info: _int_info @final -class _int_info(structseq[int], tuple[int, int]): +class _int_info(structseq[int], tuple[int, int, int, int]): @property def bits_per_digit(self) -> int: ... @property def sizeof_digit(self) -> int: ... + @property + def default_max_str_digits(self) -> int: ... + @property + def str_digits_check_threshold(self) -> int: ... @final class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): @@ -212,7 +219,7 @@ version_info: _version_info def call_tracing(__func: Callable[..., _T], __args: Any) -> _T: ... def _clear_type_cache() -> None: ... def _current_frames() -> dict[int, FrameType]: ... -def _getframe(__depth: int = ...) -> FrameType: ... +def _getframe(__depth: int = 0) -> FrameType: ... def _debugmallocstats() -> None: ... def __displayhook__(__value: object) -> None: ... def __excepthook__(__exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None) -> None: ... @@ -221,8 +228,7 @@ def exc_info() -> OptExcInfo: ... if sys.version_info >= (3, 11): def exception() -> BaseException | None: ... -# sys.exit() accepts an optional argument of anything printable -def exit(__status: object = ...) -> NoReturn: ... +def exit(__status: _ExitCode = None) -> NoReturn: ... def getallocatedblocks() -> int: ... def getdefaultencoding() -> str: ... @@ -272,11 +278,10 @@ if sys.platform == "win32": def intern(__string: str) -> str: ... def is_finalizing() -> bool: ... - -__breakpointhook__: Any # contains the original value of breakpointhook - def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... +__breakpointhook__ = breakpointhook # Contains the original value of breakpointhook + if sys.platform != "win32": def setdlopenflags(__flags: int) -> None: ... @@ -299,7 +304,7 @@ if sys.version_info >= (3, 8): exc_value: BaseException | None exc_traceback: TracebackType | None err_msg: str | None - object: _object | None + object: _object unraisablehook: Callable[[UnraisableHookArgs], Any] def __unraisablehook__(__unraisable: UnraisableHookArgs) -> Any: ... def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... 
@@ -327,3 +332,8 @@ if sys.version_info < (3, 8): _CoroWrapper: TypeAlias = Callable[[Coroutine[Any, Any, Any]], Any] def set_coroutine_wrapper(__wrapper: _CoroWrapper) -> None: ... def get_coroutine_wrapper() -> _CoroWrapper: ... + +# The following two functions were added in 3.11.0, 3.10.7, 3.9.14, 3.8.14, & 3.7.14, +# as part of the response to CVE-2020-10735 +def set_int_max_str_digits(maxdigits: int) -> None: ... +def get_int_max_str_digits() -> int: ... diff --git a/mypy/typeshed/stdlib/sysconfig.pyi b/mypy/typeshed/stdlib/sysconfig.pyi index 03362b5caef9..7e29cf1326d6 100644 --- a/mypy/typeshed/stdlib/sysconfig.pyi +++ b/mypy/typeshed/stdlib/sysconfig.pyi @@ -16,7 +16,7 @@ __all__ = [ "parse_config_h", ] -def get_config_var(name: str) -> str | None: ... +def get_config_var(name: str) -> Any: ... @overload def get_config_vars() -> dict[str, Any]: ... @overload @@ -28,11 +28,17 @@ if sys.version_info >= (3, 10): def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... def get_path_names() -> tuple[str, ...]: ... -def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> str: ... -def get_paths(scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> dict[str, str]: ... +def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> str: ... +def get_paths(scheme: str = ..., vars: dict[str, Any] | None = None, expand: bool = True) -> dict[str, str]: ... def get_python_version() -> str: ... def get_platform() -> str: ... -def is_python_build(check_home: bool = ...) -> bool: ... -def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = ...) -> dict[str, Any]: ... + +if sys.version_info >= (3, 11): + def is_python_build(check_home: object = None) -> bool: ... + +else: + def is_python_build(check_home: bool = False) -> bool: ... + +def parse_config_h(fp: IO[Any], vars: dict[str, Any] | None = None) -> dict[str, Any]: ... def get_config_h_filename() -> str: ... def get_makefile_filename() -> str: ... 
diff --git a/mypy/typeshed/stdlib/tarfile.pyi b/mypy/typeshed/stdlib/tarfile.pyi index cf74899a8fb4..5cf1d55cac63 100644 --- a/mypy/typeshed/stdlib/tarfile.pyi +++ b/mypy/typeshed/stdlib/tarfile.pyi @@ -1,13 +1,13 @@ import bz2 import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath -from builtins import list as _list, type as Type # aliases to avoid name clashes with fields named "type" or "list" +from _typeshed import StrOrBytesPath, StrPath +from builtins import list as _list # aliases to avoid name clashes with fields named "type" or "list" from collections.abc import Callable, Iterable, Iterator, Mapping from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj from types import TracebackType -from typing import IO, Protocol, overload -from typing_extensions import Literal +from typing import IO, ClassVar, Protocol, overload +from typing_extensions import Literal, Self __all__ = [ "TarFile", @@ -89,10 +89,10 @@ PAX_NAME_FIELDS: set[str] ENCODING: str def open( - name: StrOrBytesPath | None = ..., - mode: str = ..., - fileobj: IO[bytes] | None = ..., # depends on mode - bufsize: int = ..., + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -110,7 +110,7 @@ class ExFileObject(io.BufferedReader): def __init__(self, tarfile: TarFile, tarinfo: TarInfo) -> None: ... class TarFile: - OPEN_METH: Mapping[str, str] + OPEN_METH: ClassVar[Mapping[str, str]] name: StrOrBytesPath | None mode: Literal["r", "a", "w", "x"] fileobj: _Fileobj | None @@ -127,32 +127,32 @@ class TarFile: offset: int # undocumented def __init__( self, - name: StrOrBytesPath | None = ..., - mode: Literal["r", "a", "w", "x"] = ..., - fileobj: _Fileobj | None = ..., - format: int | None = ..., - tarinfo: type[TarInfo] | None = ..., - dereference: bool | None = ..., - ignore_zeros: bool | None = ..., - encoding: str | None = ..., - errors: str = ..., - pax_headers: Mapping[str, str] | None = ..., - debug: int | None = ..., - errorlevel: int | None = ..., - copybufsize: int | None = ..., # undocumented + name: StrOrBytesPath | None = None, + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, + format: int | None = None, + tarinfo: type[TarInfo] | None = None, + dereference: bool | None = None, + ignore_zeros: bool | None = None, + encoding: str | None = None, + errors: str = "surrogateescape", + pax_headers: Mapping[str, str] | None = None, + debug: int | None = None, + errorlevel: int | None = None, + copybufsize: int | None = None, # undocumented ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def __iter__(self) -> Iterator[TarInfo]: ... @classmethod def open( - cls: type[Self], - name: StrOrBytesPath | None = ..., - mode: str = ..., - fileobj: IO[bytes] | None = ..., # depends on mode - bufsize: int = ..., + cls, + name: StrOrBytesPath | None = None, + mode: str = "r", + fileobj: IO[bytes] | None = None, # depends on mode + bufsize: int = 10240, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -166,10 +166,10 @@ class TarFile: ) -> Self: ... 
@classmethod def taropen( - cls: type[Self], + cls, name: StrOrBytesPath | None, - mode: Literal["r", "a", "w", "x"] = ..., - fileobj: _Fileobj | None = ..., + mode: Literal["r", "a", "w", "x"] = "r", + fileobj: _Fileobj | None = None, *, compresslevel: int = ..., format: int | None = ..., @@ -184,11 +184,11 @@ class TarFile: @overload @classmethod def gzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, - mode: Literal["r"] = ..., - fileobj: _GzipReadableFileobj | None = ..., - compresslevel: int = ..., + mode: Literal["r"] = "r", + fileobj: _GzipReadableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -202,11 +202,11 @@ class TarFile: @overload @classmethod def gzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["w", "x"], - fileobj: _GzipWritableFileobj | None = ..., - compresslevel: int = ..., + fileobj: _GzipWritableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -220,11 +220,11 @@ class TarFile: @overload @classmethod def bz2open( - cls: type[Self], + cls, name: StrOrBytesPath | None, mode: Literal["w", "x"], - fileobj: _Bz2WritableFileobj | None = ..., - compresslevel: int = ..., + fileobj: _Bz2WritableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -238,11 +238,11 @@ class TarFile: @overload @classmethod def bz2open( - cls: type[Self], + cls, name: StrOrBytesPath | None, - mode: Literal["r"] = ..., - fileobj: _Bz2ReadableFileobj | None = ..., - compresslevel: int = ..., + mode: Literal["r"] = "r", + fileobj: _Bz2ReadableFileobj | None = None, + compresslevel: int = 9, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -255,11 +255,11 @@ class TarFile: ) -> Self: ... @classmethod def xzopen( - cls: type[Self], + cls, name: StrOrBytesPath | None, - mode: Literal["r", "w", "x"] = ..., - fileobj: IO[bytes] | None = ..., - preset: int | None = ..., + mode: Literal["r", "w", "x"] = "r", + fileobj: IO[bytes] | None = None, + preset: int | None = None, *, format: int | None = ..., tarinfo: type[TarInfo] | None = ..., @@ -273,16 +273,16 @@ class TarFile: def getmember(self, name: str) -> TarInfo: ... def getmembers(self) -> _list[TarInfo]: ... def getnames(self) -> _list[str]: ... - def list(self, verbose: bool = ..., *, members: _list[TarInfo] | None = ...) -> None: ... + def list(self, verbose: bool = True, *, members: _list[TarInfo] | None = None) -> None: ... def next(self) -> TarInfo | None: ... def extractall( - self, path: StrOrBytesPath = ..., members: Iterable[TarInfo] | None = ..., *, numeric_owner: bool = ... + self, path: StrOrBytesPath = ".", members: Iterable[TarInfo] | None = None, *, numeric_owner: bool = False ) -> None: ... def extract( - self, member: str | TarInfo, path: StrOrBytesPath = ..., set_attrs: bool = ..., *, numeric_owner: bool = ... + self, member: str | TarInfo, path: StrOrBytesPath = "", set_attrs: bool = True, *, numeric_owner: bool = False ) -> None: ... def _extract_member( - self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = ..., numeric_owner: bool = ... + self, tarinfo: TarInfo, targetpath: str, set_attrs: bool = True, numeric_owner: bool = False ) -> None: ... # undocumented def extractfile(self, member: str | TarInfo) -> IO[bytes] | None: ... def makedir(self, tarinfo: TarInfo, targetpath: StrOrBytesPath) -> None: ... 
# undocumented @@ -297,14 +297,14 @@ class TarFile: def add( self, name: StrPath, - arcname: StrPath | None = ..., - recursive: bool = ..., + arcname: StrPath | None = None, + recursive: bool = True, *, - filter: Callable[[TarInfo], TarInfo | None] | None = ..., + filter: Callable[[TarInfo], TarInfo | None] | None = None, ) -> None: ... - def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = ...) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = None) -> None: ... def gettarinfo( - self, name: StrOrBytesPath | None = ..., arcname: str | None = ..., fileobj: IO[bytes] | None = ... + self, name: StrOrBytesPath | None = None, arcname: str | None = None, fileobj: IO[bytes] | None = None ) -> TarInfo: ... def close(self) -> None: ... @@ -344,17 +344,21 @@ class TarInfo: uname: str gname: str pax_headers: Mapping[str, str] - def __init__(self, name: str = ...) -> None: ... + def __init__(self, name: str = "") -> None: ... @classmethod - def frombuf(cls: Type[Self], buf: bytes, encoding: str, errors: str) -> Self: ... + def frombuf(cls, buf: bytes | bytearray, encoding: str, errors: str) -> Self: ... @classmethod - def fromtarfile(cls: Type[Self], tarfile: TarFile) -> Self: ... + def fromtarfile(cls, tarfile: TarFile) -> Self: ... @property def linkpath(self) -> str: ... @linkpath.setter def linkpath(self, linkname: str) -> None: ... def get_info(self) -> Mapping[str, str | int | bytes | Mapping[str, str]]: ... - def tobuf(self, format: int | None = ..., encoding: str | None = ..., errors: str = ...) -> bytes: ... + if sys.version_info >= (3, 8): + def tobuf(self, format: int | None = 2, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + else: + def tobuf(self, format: int | None = 1, encoding: str | None = "utf-8", errors: str = "surrogateescape") -> bytes: ... + def create_ustar_header( self, info: Mapping[str, str | int | bytes | Mapping[str, str]], encoding: str, errors: str ) -> bytes: ... diff --git a/mypy/typeshed/stdlib/telnetlib.pyi b/mypy/typeshed/stdlib/telnetlib.pyi index 67ae5fcc8055..10f6e4930f75 100644 --- a/mypy/typeshed/stdlib/telnetlib.pyi +++ b/mypy/typeshed/stdlib/telnetlib.pyi @@ -1,9 +1,9 @@ import socket -from _typeshed import Self from collections.abc import Callable, Sequence from re import Match, Pattern from types import TracebackType from typing import Any +from typing_extensions import Self __all__ = ["Telnet"] @@ -88,15 +88,15 @@ NOOPT: bytes class Telnet: host: str | None # undocumented - def __init__(self, host: str | None = ..., port: int = ..., timeout: float = ...) -> None: ... - def open(self, host: str, port: int = ..., timeout: float = ...) -> None: ... + def __init__(self, host: str | None = None, port: int = 0, timeout: float = ...) -> None: ... + def open(self, host: str, port: int = 0, timeout: float = ...) -> None: ... def msg(self, msg: str, *args: Any) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def close(self) -> None: ... def get_socket(self) -> socket.socket: ... def fileno(self) -> int: ... def write(self, buffer: bytes) -> None: ... - def read_until(self, match: bytes, timeout: float | None = ...) -> bytes: ... + def read_until(self, match: bytes, timeout: float | None = None) -> bytes: ... def read_all(self) -> bytes: ... def read_some(self) -> bytes: ... def read_very_eager(self) -> bytes: ... @@ -113,9 +113,9 @@ class Telnet: def mt_interact(self) -> None: ... def listener(self) -> None: ... 
def expect( - self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = ... + self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = None ) -> tuple[int, Match[bytes] | None, bytes]: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index 2c096f0fb4de..dbff6d632d02 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -1,10 +1,10 @@ import io import sys -from _typeshed import BytesPath, GenericPath, Self, StrPath, WriteableBuffer +from _typeshed import BytesPath, GenericPath, StrPath, WriteableBuffer from collections.abc import Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -37,76 +37,76 @@ if sys.version_info >= (3, 8): @overload def NamedTemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[str]: ... @overload def NamedTemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[bytes]: ... @overload def NamedTemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, *, - errors: str | None = ..., + errors: str | None = None, ) -> _TemporaryFileWrapper[Any]: ... else: @overload def NamedTemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[str]: ... 
@overload def NamedTemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[bytes]: ... @overload def NamedTemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., - delete: bool = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, + delete: bool = True, ) -> _TemporaryFileWrapper[Any]: ... if sys.platform == "win32": @@ -116,38 +116,38 @@ else: @overload def TemporaryFile( mode: _StrMode, - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[str]: ... @overload def TemporaryFile( - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[bytes]: ... @overload def TemporaryFile( - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: AnyStr | None = ..., - prefix: AnyStr | None = ..., - dir: GenericPath[AnyStr] | None = ..., + mode: str = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: GenericPath[AnyStr] | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> IO[Any]: ... else: @overload @@ -185,8 +185,8 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): file: IO[AnyStr] # io.TextIOWrapper, io.BufferedReader or io.BufferedWriter name: str delete: bool - def __init__(self, file: IO[AnyStr], name: str, delete: bool = ...) -> None: ... - def __enter__(self: Self) -> Self: ... + def __init__(self, file: IO[AnyStr], name: str, delete: bool = True) -> None: ... + def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... def close(self) -> None: ... 
@@ -235,44 +235,72 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): @overload def __init__( self: SpooledTemporaryFile[bytes], - max_size: int = ..., - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, *, - errors: str | None = ..., + errors: str | None = None, ) -> None: ... @overload def __init__( self: SpooledTemporaryFile[str], - max_size: int = ..., - mode: _StrMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, *, - errors: str | None = ..., + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, ) -> None: ... @overload def __init__( self, - max_size: int = ..., - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, *, - errors: str | None = ..., + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + errors: str | None = None, ) -> None: ... @property def errors(self) -> str | None: ... @@ -280,42 +308,68 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): @overload def __init__( self: SpooledTemporaryFile[bytes], - max_size: int = ..., - mode: _BytesMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int = 0, + mode: _BytesMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... @overload def __init__( self: SpooledTemporaryFile[str], - max_size: int = ..., - mode: _StrMode = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = 0, + *, + mode: _StrMode, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... @overload def __init__( self, - max_size: int = ..., - mode: str = ..., - buffering: int = ..., - encoding: str | None = ..., - newline: str | None = ..., - suffix: str | None = ..., - prefix: str | None = ..., - dir: str | None = ..., + max_size: int, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + ) -> None: ... + @overload + def __init__( + self, + max_size: int = 0, + *, + mode: str, + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, ) -> None: ... def rollover(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> None: ... # These methods are copied from the abstract methods of IO, because # SpooledTemporaryFile implements IO. @@ -337,7 +391,7 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def readlines(self, __hint: int = ...) -> list[AnyStr]: ... # type: ignore[override] def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... - def truncate(self, size: int | None = ...) -> None: ... # type: ignore[override] + def truncate(self, size: int | None = None) -> None: ... # type: ignore[override] def write(self, s: AnyStr) -> int: ... def writelines(self, iterable: Iterable[AnyStr]) -> None: ... # type: ignore[override] def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] @@ -355,27 +409,30 @@ class TemporaryDirectory(Generic[AnyStr]): @overload def __init__( self: TemporaryDirectory[str], - suffix: str | None = ..., - prefix: str | None = ..., - dir: StrPath | None = ..., - ignore_cleanup_errors: bool = ..., + suffix: str | None = None, + prefix: str | None = None, + dir: StrPath | None = None, + ignore_cleanup_errors: bool = False, ) -> None: ... @overload def __init__( self: TemporaryDirectory[bytes], - suffix: bytes | None = ..., - prefix: bytes | None = ..., - dir: BytesPath | None = ..., - ignore_cleanup_errors: bool = ..., + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, + ignore_cleanup_errors: bool = False, ) -> None: ... else: @overload def __init__( - self: TemporaryDirectory[str], suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ... + self: TemporaryDirectory[str], suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None ) -> None: ... @overload def __init__( - self: TemporaryDirectory[bytes], suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ... + self: TemporaryDirectory[bytes], + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: BytesPath | None = None, ) -> None: ... def cleanup(self) -> None: ... @@ -387,19 +444,19 @@ class TemporaryDirectory(Generic[AnyStr]): # The overloads overlap, but they should still work fine. @overload def mkstemp( # type: ignore[misc] - suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ..., text: bool = ... 
+ suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False ) -> tuple[int, str]: ... @overload def mkstemp( - suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ..., text: bool = ... + suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None, text: bool = False ) -> tuple[int, bytes]: ... # The overloads overlap, but they should still work fine. @overload -def mkdtemp(suffix: str | None = ..., prefix: str | None = ..., dir: StrPath | None = ...) -> str: ... # type: ignore[misc] +def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ... # type: ignore[misc] @overload -def mkdtemp(suffix: bytes | None = ..., prefix: bytes | None = ..., dir: BytesPath | None = ...) -> bytes: ... -def mktemp(suffix: str = ..., prefix: str = ..., dir: StrPath | None = ...) -> str: ... +def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ... +def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ... def gettempdirb() -> bytes: ... def gettempprefixb() -> bytes: ... def gettempdir() -> str: ... diff --git a/mypy/typeshed/stdlib/termios.pyi b/mypy/typeshed/stdlib/termios.pyi index 494162a49b38..bf8d7bee2473 100644 --- a/mypy/typeshed/stdlib/termios.pyi +++ b/mypy/typeshed/stdlib/termios.pyi @@ -4,9 +4,9 @@ from typing import Any from typing_extensions import TypeAlias if sys.platform != "win32": + # Must be a list of length 7, containing 6 ints and a list of NCCS 1-character bytes or ints. _Attr: TypeAlias = list[int | list[bytes | int]] - # TODO constants not really documented B0: int B1000000: int B110: int @@ -44,17 +44,22 @@ if sys.platform != "win32": BSDLY: int CBAUD: int CBAUDEX: int + CDEL: int CDSUSP: int CEOF: int CEOL: int + CEOL2: int CEOT: int CERASE: int + CESC: int CFLUSH: int CIBAUD: int CINTR: int CKILL: int CLNEXT: int CLOCAL: int + CNUL: int + COMMON: int CQUIT: int CR0: int CR1: int @@ -73,6 +78,7 @@ if sys.platform != "win32": CSTOP: int CSTOPB: int CSUSP: int + CSWTCH: int CWERASE: int ECHO: int ECHOCTL: int @@ -93,6 +99,7 @@ if sys.platform != "win32": FIONREAD: int FLUSHO: int HUPCL: int + IBSHIFT: int ICANON: int ICRNL: int IEXTEN: int @@ -100,6 +107,7 @@ if sys.platform != "win32": IGNCR: int IGNPAR: int IMAXBEL: int + INIT_C_CC: int INLCR: int INPCK: int IOCSIZE_MASK: int @@ -110,17 +118,18 @@ if sys.platform != "win32": IXANY: int IXOFF: int IXON: int + N_MOUSE: int + N_PPP: int + N_SLIP: int + N_STRIP: int + N_TTY: int NCC: int NCCS: int NL0: int NL1: int NLDLY: int NOFLSH: int - N_MOUSE: int - N_PPP: int - N_SLIP: int - N_STRIP: int - N_TTY: int + NSWTCH: int OCRNL: int OFDEL: int OFILL: int @@ -151,6 +160,7 @@ if sys.platform != "win32": TCSADRAIN: int TCSAFLUSH: int TCSANOW: int + TCSASOFT: int TCSBRK: int TCSBRKP: int TCSETA: int @@ -167,15 +177,11 @@ if sys.platform != "win32": TIOCGLCKTRMIOS: int TIOCGPGRP: int TIOCGSERIAL: int + TIOCGSIZE: int TIOCGSOFTCAR: int TIOCGWINSZ: int TIOCINQ: int TIOCLINUX: int - TIOCMBIC: int - TIOCMBIS: int - TIOCMGET: int - TIOCMIWAIT: int - TIOCMSET: int TIOCM_CAR: int TIOCM_CD: int TIOCM_CTS: int @@ -187,10 +193,14 @@ if sys.platform != "win32": TIOCM_RTS: int TIOCM_SR: int TIOCM_ST: int + TIOCMBIC: int + TIOCMBIS: int + TIOCMGET: int + TIOCMIWAIT: int + TIOCMSET: int TIOCNOTTY: int TIOCNXCL: int TIOCOUTQ: int - TIOCPKT: int TIOCPKT_DATA: int TIOCPKT_DOSTOP: int TIOCPKT_FLUSHREAD: int @@ -198,7 +208,9 @@ if 
sys.platform != "win32": TIOCPKT_NOSTOP: int TIOCPKT_START: int TIOCPKT_STOP: int + TIOCPKT: int TIOCSCTTY: int + TIOCSER_TEMT: int TIOCSERCONFIG: int TIOCSERGETLSR: int TIOCSERGETMULTI: int @@ -206,14 +218,15 @@ if sys.platform != "win32": TIOCSERGWILD: int TIOCSERSETMULTI: int TIOCSERSWILD: int - TIOCSER_TEMT: int TIOCSETD: int TIOCSLCKTRMIOS: int TIOCSPGRP: int TIOCSSERIAL: int + TIOCSSIZE: int TIOCSSOFTCAR: int TIOCSTI: int TIOCSWINSZ: int + TIOCTTYGSTRUCT: int TOSTOP: int VDISCARD: int VEOF: int @@ -238,7 +251,8 @@ if sys.platform != "win32": VWERASE: int XCASE: int XTABS: int - def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... + + def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... # Returns _Attr; we use Any to avoid a union in the return type def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... def tcdrain(__fd: FileDescriptorLike) -> None: ... diff --git a/mypy/typeshed/stdlib/textwrap.pyi b/mypy/typeshed/stdlib/textwrap.pyi index 9e423cb5ce94..e4a5b7899e8e 100644 --- a/mypy/typeshed/stdlib/textwrap.pyi +++ b/mypy/typeshed/stdlib/textwrap.pyi @@ -27,19 +27,19 @@ class TextWrapper: x: str # leaked loop variable def __init__( self, - width: int = ..., - initial_indent: str = ..., - subsequent_indent: str = ..., - expand_tabs: bool = ..., - replace_whitespace: bool = ..., - fix_sentence_endings: bool = ..., - break_long_words: bool = ..., - drop_whitespace: bool = ..., - break_on_hyphens: bool = ..., - tabsize: int = ..., + width: int = 70, + initial_indent: str = "", + subsequent_indent: str = "", + expand_tabs: bool = True, + replace_whitespace: bool = True, + fix_sentence_endings: bool = False, + break_long_words: bool = True, + drop_whitespace: bool = True, + break_on_hyphens: bool = True, + tabsize: int = 8, *, - max_lines: int | None = ..., - placeholder: str = ..., + max_lines: int | None = None, + placeholder: str = " [...]", ) -> None: ... # Private methods *are* part of the documented API for subclasses. def _munge_whitespace(self, text: str) -> str: ... @@ -53,7 +53,7 @@ class TextWrapper: def wrap( text: str, - width: int = ..., + width: int = 70, *, initial_indent: str = ..., subsequent_indent: str = ..., @@ -69,7 +69,7 @@ def wrap( ) -> list[str]: ... def fill( text: str, - width: int = ..., + width: int = 70, *, initial_indent: str = ..., subsequent_indent: str = ..., @@ -100,4 +100,4 @@ def shorten( placeholder: str = ..., ) -> str: ... def dedent(text: str) -> str: ... -def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = ...) -> str: ... +def indent(text: str, prefix: str, predicate: Callable[[str], bool] | None = None) -> str: ... diff --git a/mypy/typeshed/stdlib/threading.pyi b/mypy/typeshed/stdlib/threading.pyi index 289a86826ecd..c0b344fe757d 100644 --- a/mypy/typeshed/stdlib/threading.pyi +++ b/mypy/typeshed/stdlib/threading.pyi @@ -74,17 +74,17 @@ class Thread: daemon: bool def __init__( self, - group: None = ..., - target: Callable[..., object] | None = ..., - name: str | None = ..., + group: None = None, + target: Callable[..., object] | None = None, + name: str | None = None, args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] | None = ..., + kwargs: Mapping[str, Any] | None = None, *, - daemon: bool | None = ..., + daemon: bool | None = None, ) -> None: ... def start(self) -> None: ... def run(self) -> None: ... - def join(self, timeout: float | None = ...) -> None: ... 
+ def join(self, timeout: float | None = None) -> None: ... if sys.version_info >= (3, 8): @property def native_id(self) -> int | None: ... # only available on some platforms @@ -102,7 +102,6 @@ class _DummyThread(Thread): def __init__(self) -> None: ... class Lock: - def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None @@ -112,8 +111,7 @@ class Lock: def locked(self) -> bool: ... class _RLock: - def __init__(self) -> None: ... - def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ... def release(self) -> None: ... __enter__ = acquire def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... @@ -121,39 +119,38 @@ class _RLock: RLock = _RLock class Condition: - def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... + def __init__(self, lock: Lock | _RLock | None = None) -> None: ... def __enter__(self) -> bool: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... - def wait_for(self, predicate: Callable[[], _T], timeout: float | None = ...) -> _T: ... - def notify(self, n: int = ...) -> None: ... + def wait(self, timeout: float | None = None) -> bool: ... + def wait_for(self, predicate: Callable[[], _T], timeout: float | None = None) -> _T: ... + def notify(self, n: int = 1) -> None: ... def notify_all(self) -> None: ... def notifyAll(self) -> None: ... # deprecated alias for notify_all() class Semaphore: _value: int - def __init__(self, value: int = ...) -> None: ... + def __init__(self, value: int = 1) -> None: ... def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... - def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... + def acquire(self, blocking: bool = True, timeout: float | None = None) -> bool: ... + def __enter__(self, blocking: bool = True, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 9): - def release(self, n: int = ...) -> None: ... + def release(self, n: int = 1) -> None: ... else: def release(self) -> None: ... class BoundedSemaphore(Semaphore): ... class Event: - def __init__(self) -> None: ... def is_set(self) -> bool: ... def isSet(self) -> bool: ... # deprecated alias for is_set() def set(self) -> None: ... def clear(self) -> None: ... - def wait(self, timeout: float | None = ...) -> bool: ... + def wait(self, timeout: float | None = None) -> bool: ... if sys.version_info >= (3, 8): from _thread import _excepthook, _ExceptHookArgs @@ -172,8 +169,8 @@ class Timer(Thread): self, interval: float, function: Callable[..., object], - args: Iterable[Any] | None = ..., - kwargs: Mapping[str, Any] | None = ..., + args: Iterable[Any] | None = None, + kwargs: Mapping[str, Any] | None = None, ) -> None: ... def cancel(self) -> None: ... @@ -184,8 +181,8 @@ class Barrier: def n_waiting(self) -> int: ... @property def broken(self) -> bool: ... - def __init__(self, parties: int, action: Callable[[], None] | None = ..., timeout: float | None = ...) 
-> None: ...
-    def wait(self, timeout: float | None = ...) -> int: ...
+    def __init__(self, parties: int, action: Callable[[], None] | None = None, timeout: float | None = None) -> None: ...
+    def wait(self, timeout: float | None = None) -> int: ...
     def reset(self) -> None: ...
     def abort(self) -> None: ...
diff --git a/mypy/typeshed/stdlib/timeit.pyi b/mypy/typeshed/stdlib/timeit.pyi
index dda6cefed0f6..a5da943c8484 100644
--- a/mypy/typeshed/stdlib/timeit.pyi
+++ b/mypy/typeshed/stdlib/timeit.pyi
@@ -11,22 +11,22 @@ default_timer: _Timer
 
 class Timer:
     def __init__(
-        self, stmt: _Stmt = ..., setup: _Stmt = ..., timer: _Timer = ..., globals: dict[str, Any] | None = ...
+        self, stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., globals: dict[str, Any] | None = None
     ) -> None: ...
-    def print_exc(self, file: IO[str] | None = ...) -> None: ...
-    def timeit(self, number: int = ...) -> float: ...
-    def repeat(self, repeat: int = ..., number: int = ...) -> list[float]: ...
-    def autorange(self, callback: Callable[[int, float], object] | None = ...) -> tuple[int, float]: ...
+    def print_exc(self, file: IO[str] | None = None) -> None: ...
+    def timeit(self, number: int = 1000000) -> float: ...
+    def repeat(self, repeat: int = 5, number: int = 1000000) -> list[float]: ...
+    def autorange(self, callback: Callable[[int, float], object] | None = None) -> tuple[int, float]: ...
 
 def timeit(
-    stmt: _Stmt = ..., setup: _Stmt = ..., timer: _Timer = ..., number: int = ..., globals: dict[str, Any] | None = ...
+    stmt: _Stmt = "pass", setup: _Stmt = "pass", timer: _Timer = ..., number: int = 1000000, globals: dict[str, Any] | None = None
 ) -> float: ...
 def repeat(
-    stmt: _Stmt = ...,
-    setup: _Stmt = ...,
+    stmt: _Stmt = "pass",
+    setup: _Stmt = "pass",
     timer: _Timer = ...,
-    repeat: int = ...,
-    number: int = ...,
-    globals: dict[str, Any] | None = ...,
+    repeat: int = 5,
+    number: int = 1000000,
+    globals: dict[str, Any] | None = None,
 ) -> list[float]: ...
-def main(args: Sequence[str] | None = ..., *, _wrap_timer: Callable[[_Timer], _Timer] | None = ...) -> None: ...
+def main(args: Sequence[str] | None = None, *, _wrap_timer: Callable[[_Timer], _Timer] | None = None) -> None: ...
diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi
index d8dd463b5a8c..1d30e4b73c23 100644
--- a/mypy/typeshed/stdlib/tkinter/__init__.pyi
+++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi
@@ -6,7 +6,7 @@ from enum import Enum
 from tkinter.constants import *
 from tkinter.font import _FontDescription
 from types import TracebackType
-from typing import Any, Generic, NamedTuple, Protocol, TypeVar, Union, overload
+from typing import Any, Generic, NamedTuple, Protocol, TypeVar, overload
 from typing_extensions import Literal, TypeAlias, TypedDict
 
 if sys.version_info >= (3, 9):
@@ -178,19 +178,17 @@ _ButtonCommand: TypeAlias = str | Callable[[], Any]  # accepts string of tcl cod
 _CanvasItemId: TypeAlias = int
 _Color: TypeAlias = str  # typically '#rrggbb', '#rgb' or color names.
 _Compound: TypeAlias = Literal["top", "left", "center", "right", "bottom", "none"]  # -compound in manual page named 'options'
-_Cursor: TypeAlias = Union[
-    str, tuple[str], tuple[str, str], tuple[str, str, str], tuple[str, str, str, str]
-]  # manual page: Tk_GetCursor
-_EntryValidateCommand: TypeAlias = (
-    str | list[str] | tuple[str, ...]
| Callable[[], bool] -) # example when it's sequence: entry['invalidcommand'] = [entry.register(print), '%P'] -_GridIndex: TypeAlias = int | str | Literal["all"] +# manual page: Tk_GetCursor +_Cursor: TypeAlias = str | tuple[str] | tuple[str, str] | tuple[str, str, str] | tuple[str, str, str, str] +# example when it's sequence: entry['invalidcommand'] = [entry.register(print), '%P'] +_EntryValidateCommand: TypeAlias = str | list[str] | tuple[str, ...] | Callable[[], bool] +_GridIndex: TypeAlias = int | str _ImageSpec: TypeAlias = _Image | str # str can be from e.g. tkinter.image_names() _Relief: TypeAlias = Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] # manual page: Tk_GetRelief _ScreenUnits: TypeAlias = str | float # Often the right type instead of int. Manual page: Tk_GetPixels # -xscrollcommand and -yscrollcommand in 'options' manual page _XYScrollCommand: TypeAlias = str | Callable[[float, float], object] -_TakeFocusValue: TypeAlias = Union[int, Literal[""], Callable[[str], bool | None]] # -takefocus in manual page named 'options' +_TakeFocusValue: TypeAlias = int | Literal[""] | Callable[[str], bool | None] # -takefocus in manual page named 'options' if sys.version_info >= (3, 11): class _VersionInfoType(NamedTuple): @@ -271,7 +269,7 @@ def NoDefaultRoot() -> None: ... _TraceMode: TypeAlias = Literal["array", "read", "write", "unset"] class Variable: - def __init__(self, master: Misc | None = ..., value: Incomplete | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: Incomplete | None = None, name: str | None = None) -> None: ... def set(self, value) -> None: ... initialize = set def get(self): ... @@ -285,30 +283,30 @@ class Variable: def __eq__(self, other: object) -> bool: ... class StringVar(Variable): - def __init__(self, master: Misc | None = ..., value: str | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: str | None = None, name: str | None = None) -> None: ... def set(self, value: str) -> None: ... initialize = set def get(self) -> str: ... class IntVar(Variable): - def __init__(self, master: Misc | None = ..., value: int | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: int | None = None, name: str | None = None) -> None: ... def set(self, value: int) -> None: ... initialize = set def get(self) -> int: ... class DoubleVar(Variable): - def __init__(self, master: Misc | None = ..., value: float | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: float | None = None, name: str | None = None) -> None: ... def set(self, value: float) -> None: ... initialize = set def get(self) -> float: ... class BooleanVar(Variable): - def __init__(self, master: Misc | None = ..., value: bool | None = ..., name: str | None = ...) -> None: ... + def __init__(self, master: Misc | None = None, value: bool | None = None, name: str | None = None) -> None: ... def set(self, value: bool) -> None: ... initialize = set def get(self) -> bool: ... -def mainloop(n: int = ...) -> None: ... +def mainloop(n: int = 0) -> None: ... getint: Incomplete getdouble: Incomplete @@ -327,15 +325,15 @@ class Misc: children: dict[str, Widget] def destroy(self) -> None: ... def deletecommand(self, name: str) -> None: ... - def tk_strictMotif(self, boolean: Incomplete | None = ...): ... + def tk_strictMotif(self, boolean: Incomplete | None = None): ... def tk_bisque(self) -> None: ... 
def tk_setPalette(self, *args, **kw) -> None: ... - def wait_variable(self, name: str | Variable = ...) -> None: ... + def wait_variable(self, name: str | Variable = "PY_VAR") -> None: ... waitvar = wait_variable - def wait_window(self, window: Misc | None = ...) -> None: ... - def wait_visibility(self, window: Misc | None = ...) -> None: ... - def setvar(self, name: str = ..., value: str = ...) -> None: ... - def getvar(self, name: str = ...): ... + def wait_window(self, window: Misc | None = None) -> None: ... + def wait_visibility(self, window: Misc | None = None) -> None: ... + def setvar(self, name: str = "PY_VAR", value: str = "1") -> None: ... + def getvar(self, name: str = "PY_VAR"): ... def getint(self, s): ... def getdouble(self, s): ... def getboolean(self, s): ... @@ -349,13 +347,13 @@ class Misc: def tk_focusNext(self) -> Misc | None: ... def tk_focusPrev(self) -> Misc | None: ... @overload - def after(self, ms: int, func: None = ...) -> None: ... + def after(self, ms: int, func: None = None) -> None: ... @overload def after(self, ms: int | Literal["idle"], func: Callable[..., object], *args: Any) -> str: ... # after_idle is essentially partialmethod(after, "idle") def after_idle(self, func: Callable[..., object], *args: Any) -> str: ... def after_cancel(self, id: str) -> None: ... - def bell(self, displayof: Literal[0] | Misc | None = ...) -> None: ... + def bell(self, displayof: Literal[0] | Misc | None = 0) -> None: ... def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... @@ -365,42 +363,42 @@ class Misc: def grab_set_global(self) -> None: ... def grab_status(self) -> Literal["local", "global"] | None: ... def option_add( - self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = ... + self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = None ) -> None: ... def option_clear(self) -> None: ... def option_get(self, name, className): ... - def option_readfile(self, fileName, priority: Incomplete | None = ...) -> None: ... + def option_readfile(self, fileName, priority: Incomplete | None = None) -> None: ... def selection_clear(self, **kw) -> None: ... def selection_get(self, **kw): ... def selection_handle(self, command, **kw) -> None: ... def selection_own(self, **kw) -> None: ... def selection_own_get(self, **kw): ... def send(self, interp, cmd, *args): ... - def lower(self, belowThis: Incomplete | None = ...) -> None: ... - def tkraise(self, aboveThis: Incomplete | None = ...) -> None: ... + def lower(self, belowThis: Incomplete | None = None) -> None: ... + def tkraise(self, aboveThis: Incomplete | None = None) -> None: ... lift = tkraise if sys.version_info >= (3, 11): def info_patchlevel(self) -> _VersionInfoType: ... - def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = ...) -> int: ... - def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = ...) -> str: ... + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = 0) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = 0) -> str: ... def winfo_cells(self) -> int: ... def winfo_children(self) -> list[Widget]: ... # Widget because it can't be Toplevel or Tk def winfo_class(self) -> str: ... 
def winfo_colormapfull(self) -> bool: ... - def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = ...) -> Misc | None: ... + def winfo_containing(self, rootX: int, rootY: int, displayof: Literal[0] | Misc | None = 0) -> Misc | None: ... def winfo_depth(self) -> int: ... def winfo_exists(self) -> bool: ... def winfo_fpixels(self, number: _ScreenUnits) -> float: ... def winfo_geometry(self) -> str: ... def winfo_height(self) -> int: ... def winfo_id(self) -> int: ... - def winfo_interps(self, displayof: Literal[0] | Misc | None = ...) -> tuple[str, ...]: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = 0) -> tuple[str, ...]: ... def winfo_ismapped(self) -> bool: ... def winfo_manager(self) -> str: ... def winfo_name(self) -> str: ... def winfo_parent(self) -> str: ... # return value needs nametowidget() - def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = ...): ... + def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = 0): ... def winfo_pixels(self, number: _ScreenUnits) -> int: ... def winfo_pointerx(self) -> int: ... def winfo_pointerxy(self) -> tuple[int, int]: ... @@ -423,7 +421,7 @@ class Misc: def winfo_viewable(self) -> bool: ... def winfo_visual(self) -> str: ... def winfo_visualid(self) -> str: ... - def winfo_visualsavailable(self, includeids: int = ...) -> list[tuple[str, int]]: ... + def winfo_visualsavailable(self, includeids: bool = False) -> list[tuple[str, int]]: ... def winfo_vrootheight(self) -> int: ... def winfo_vrootwidth(self) -> int: ... def winfo_vrootx(self) -> int: ... @@ -434,7 +432,7 @@ class Misc: def update(self) -> None: ... def update_idletasks(self) -> None: ... @overload - def bindtags(self, tagList: None = ...) -> tuple[str, ...]: ... + def bindtags(self, tagList: None = None) -> tuple[str, ...]: ... @overload def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... # bind with isinstance(func, str) doesn't return anything, but all other @@ -442,49 +440,49 @@ class Misc: @overload def bind( self, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... # There's no way to know what type of widget bind_all and bind_class # callbacks will get, so those are Misc. @overload def bind_all( self, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... 
+ def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload def bind_class( self, className: str, - sequence: str | None = ..., - func: Callable[[Event[Misc]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Misc]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def unbind(self, sequence: str, funcid: str | None = ...) -> None: ... + def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def unbind(self, sequence: str, funcid: str | None = None) -> None: ... def unbind_all(self, sequence: str) -> None: ... def unbind_class(self, className: str, sequence: str) -> None: ... - def mainloop(self, n: int = ...) -> None: ... + def mainloop(self, n: int = 0) -> None: ... def quit(self) -> None: ... @property def _windowingsystem(self) -> Literal["win32", "aqua", "x11"]: ... def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... def register( - self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = ..., needcleanup: int = ... + self, func: Callable[..., object], subst: Callable[..., Sequence[Any]] | None = None, needcleanup: int = 1 ) -> str: ... def keys(self) -> list[str]: ... @overload @@ -492,14 +490,14 @@ class Misc: @overload def pack_propagate(self) -> None: ... propagate = pack_propagate - def grid_anchor(self, anchor: _Anchor | None = ...) -> None: ... + def grid_anchor(self, anchor: _Anchor | None = None) -> None: ... anchor = grid_anchor @overload def grid_bbox( - self, column: None = ..., row: None = ..., col2: None = ..., row2: None = ... + self, column: None = None, row: None = None, col2: None = None, row2: None = None ) -> tuple[int, int, int, int] | None: ... @overload - def grid_bbox(self, column: int, row: int, col2: None = ..., row2: None = ...) -> tuple[int, int, int, int] | None: ... + def grid_bbox(self, column: int, row: int, col2: None = None, row2: None = None) -> tuple[int, int, int, int] | None: ... @overload def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> tuple[int, int, int, int] | None: ... bbox = grid_bbox @@ -534,7 +532,7 @@ class Misc: size = grid_size # Widget because Toplevel or Tk is never a slave def pack_slaves(self) -> list[Widget]: ... - def grid_slaves(self, row: int | None = ..., column: int | None = ...) -> list[Widget]: ... + def grid_slaves(self, row: int | None = None, column: int | None = None) -> list[Widget]: ... def place_slaves(self) -> list[Widget]: ... slaves = pack_slaves def event_add(self, virtual: str, *sequences: str) -> None: ... @@ -571,14 +569,14 @@ class Misc: x: _ScreenUnits = ..., y: _ScreenUnits = ..., ) -> None: ... - def event_info(self, virtual: str | None = ...) -> tuple[str, ...]: ... + def event_info(self, virtual: str | None = None) -> tuple[str, ...]: ... def image_names(self) -> tuple[str, ...]: ... def image_types(self) -> tuple[str, ...]: ... # See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... def __getitem__(self, key: str) -> Any: ... 
def cget(self, key: str) -> Any: ... - def configure(self, cnf: Any = ...) -> Any: ... + def configure(self, cnf: Any = None) -> Any: ... # TODO: config is an alias of configure, but adding that here creates lots of mypy errors class CallWrapper: @@ -615,7 +613,7 @@ class Wm: def wm_aspect(self, minNumer: int, minDenom: int, maxNumer: int, maxDenom: int) -> None: ... @overload def wm_aspect( - self, minNumer: None = ..., minDenom: None = ..., maxNumer: None = ..., maxDenom: None = ... + self, minNumer: None = None, minDenom: None = None, maxNumer: None = None, maxDenom: None = None ) -> tuple[int, int, int, int] | None: ... aspect = wm_aspect @overload @@ -625,7 +623,7 @@ class Wm: @overload def wm_attributes(self, __option: str, __value, *__other_option_value_pairs: Any) -> None: ... attributes = wm_attributes - def wm_client(self, name: str | None = ...) -> str: ... + def wm_client(self, name: str | None = None) -> str: ... client = wm_client @overload def wm_colormapwindows(self) -> list[Misc]: ... @@ -634,91 +632,91 @@ class Wm: @overload def wm_colormapwindows(self, __first_wlist_item: Misc, *other_wlist_items: Misc) -> None: ... colormapwindows = wm_colormapwindows - def wm_command(self, value: str | None = ...) -> str: ... + def wm_command(self, value: str | None = None) -> str: ... command = wm_command # Some of these always return empty string, but return type is set to None to prevent accidentally using it def wm_deiconify(self) -> None: ... deiconify = wm_deiconify - def wm_focusmodel(self, model: Literal["active", "passive"] | None = ...) -> Literal["active", "passive", ""]: ... + def wm_focusmodel(self, model: Literal["active", "passive"] | None = None) -> Literal["active", "passive", ""]: ... focusmodel = wm_focusmodel def wm_forget(self, window: Wm) -> None: ... forget = wm_forget def wm_frame(self) -> str: ... frame = wm_frame @overload - def wm_geometry(self, newGeometry: None = ...) -> str: ... + def wm_geometry(self, newGeometry: None = None) -> str: ... @overload def wm_geometry(self, newGeometry: str) -> None: ... geometry = wm_geometry def wm_grid( self, - baseWidth: Incomplete | None = ..., - baseHeight: Incomplete | None = ..., - widthInc: Incomplete | None = ..., - heightInc: Incomplete | None = ..., + baseWidth: Incomplete | None = None, + baseHeight: Incomplete | None = None, + widthInc: Incomplete | None = None, + heightInc: Incomplete | None = None, ): ... grid = wm_grid - def wm_group(self, pathName: Incomplete | None = ...): ... + def wm_group(self, pathName: Incomplete | None = None): ... group = wm_group - def wm_iconbitmap(self, bitmap: Incomplete | None = ..., default: Incomplete | None = ...): ... + def wm_iconbitmap(self, bitmap: Incomplete | None = None, default: Incomplete | None = None): ... iconbitmap = wm_iconbitmap def wm_iconify(self) -> None: ... iconify = wm_iconify - def wm_iconmask(self, bitmap: Incomplete | None = ...): ... + def wm_iconmask(self, bitmap: Incomplete | None = None): ... iconmask = wm_iconmask - def wm_iconname(self, newName: Incomplete | None = ...) -> str: ... + def wm_iconname(self, newName: Incomplete | None = None) -> str: ... iconname = wm_iconname def wm_iconphoto(self, default: bool, __image1: Image, *args: Image) -> None: ... iconphoto = wm_iconphoto - def wm_iconposition(self, x: int | None = ..., y: int | None = ...) -> tuple[int, int] | None: ... + def wm_iconposition(self, x: int | None = None, y: int | None = None) -> tuple[int, int] | None: ... 
iconposition = wm_iconposition - def wm_iconwindow(self, pathName: Incomplete | None = ...): ... + def wm_iconwindow(self, pathName: Incomplete | None = None): ... iconwindow = wm_iconwindow def wm_manage(self, widget) -> None: ... manage = wm_manage @overload - def wm_maxsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... + def wm_maxsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload - def wm_minsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... + def wm_minsize(self, width: None = None, height: None = None) -> tuple[int, int]: ... @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @overload - def wm_overrideredirect(self, boolean: None = ...) -> bool | None: ... # returns True or None + def wm_overrideredirect(self, boolean: None = None) -> bool | None: ... # returns True or None @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect - def wm_positionfrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... + def wm_positionfrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... positionfrom = wm_positionfrom @overload def wm_protocol(self, name: str, func: Callable[[], object] | str) -> None: ... @overload - def wm_protocol(self, name: str, func: None = ...) -> str: ... + def wm_protocol(self, name: str, func: None = None) -> str: ... @overload - def wm_protocol(self, name: None = ..., func: None = ...) -> tuple[str, ...]: ... + def wm_protocol(self, name: None = None, func: None = None) -> tuple[str, ...]: ... protocol = wm_protocol @overload - def wm_resizable(self, width: None = ..., height: None = ...) -> tuple[bool, bool]: ... + def wm_resizable(self, width: None = None, height: None = None) -> tuple[bool, bool]: ... @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable - def wm_sizefrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... + def wm_sizefrom(self, who: Literal["program", "user"] | None = None) -> Literal["", "program", "user"]: ... sizefrom = wm_sizefrom @overload - def wm_state(self, newstate: None = ...) -> str: ... + def wm_state(self, newstate: None = None) -> str: ... @overload def wm_state(self, newstate: str) -> None: ... state = wm_state @overload - def wm_title(self, string: None = ...) -> str: ... + def wm_title(self, string: None = None) -> str: ... @overload def wm_title(self, string: str) -> None: ... title = wm_title @overload - def wm_transient(self, master: None = ...) -> _tkinter.Tcl_Obj: ... + def wm_transient(self, master: None = None) -> _tkinter.Tcl_Obj: ... @overload def wm_transient(self, master: Wm | _tkinter.Tcl_Obj) -> None: ... transient = wm_transient @@ -735,17 +733,17 @@ class Tk(Misc, Wm): # args. # use `git grep screenName` to find them self, - screenName: str | None = ..., - baseName: str | None = ..., - className: str = ..., - useTk: bool = ..., - sync: bool = ..., - use: str | None = ..., + screenName: str | None = None, + baseName: str | None = None, + className: str = "Tk", + useTk: bool = True, + sync: bool = False, + use: str | None = None, ) -> None: ... 
@overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -802,7 +800,7 @@ class Tk(Misc, Wm): def wantobjects(self, *args, **kwargs): ... def willdispatch(self): ... -def Tcl(screenName: str | None = ..., baseName: str | None = ..., className: str = ..., useTk: bool = ...) -> Tk: ... +def Tcl(screenName: str | None = None, baseName: str | None = None, className: str = "Tk", useTk: bool = False) -> Tk: ... _InMiscTotal = TypedDict("_InMiscTotal", {"in": Misc}) _InMiscNonTotal = TypedDict("_InMiscNonTotal", {"in": Misc}, total=False) @@ -933,14 +931,14 @@ class Widget(BaseWidget, Pack, Place, Grid): @overload def bind( self: _W, - sequence: str | None = ..., - func: Callable[[Event[_W]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[_W]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... @overload - def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... class Toplevel(BaseWidget, Wm): # Toplevel and Tk have the same options because they correspond to the same @@ -948,7 +946,7 @@ class Toplevel(BaseWidget, Wm): # copy/pasted here instead of aliasing as 'config = Tk.config'. def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -978,7 +976,7 @@ class Toplevel(BaseWidget, Wm): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1004,7 +1002,7 @@ class Toplevel(BaseWidget, Wm): class Button(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -1053,7 +1051,7 @@ class Button(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -1101,7 +1099,7 @@ class Button(Widget): class Canvas(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -1144,7 +1142,7 @@ class Canvas(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1189,8 +1187,8 @@ class Canvas(Widget, XView, YView): newtag: str, x: _ScreenUnits, y: _ScreenUnits, - halo: _ScreenUnits | None = ..., - start: str | _CanvasItemId | None = ..., + halo: _ScreenUnits | None = None, + start: str | _CanvasItemId | None = None, ) -> None: ... def addtag_enclosed(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... def addtag_overlapping(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... @@ -1200,7 +1198,7 @@ class Canvas(Widget, XView, YView): def find_all(self) -> tuple[_CanvasItemId, ...]: ... 
def find_below(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... def find_closest( - self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = ..., start: str | _CanvasItemId | None = ... + self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | _CanvasItemId | None = None ) -> tuple[_CanvasItemId, ...]: ... def find_enclosed( self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits @@ -1213,19 +1211,19 @@ class Canvas(Widget, XView, YView): def tag_bind( self, tagOrId: str | _CanvasItemId, - sequence: str | None = ..., - func: Callable[[Event[Canvas]], object] | None = ..., - add: Literal["", "+"] | bool | None = ..., + sequence: str | None = None, + func: Callable[[Event[Canvas]], object] | None = None, + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload def tag_bind( - self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ... + self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None ) -> None: ... @overload - def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = ...) -> None: ... - def canvasx(self, screenx, gridspacing: Incomplete | None = ...): ... - def canvasy(self, screeny, gridspacing: Incomplete | None = ...): ... + def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = None) -> None: ... + def canvasx(self, screenx, gridspacing: Incomplete | None = None): ... + def canvasy(self, screeny, gridspacing: Incomplete | None = None): ... @overload def coords(self, __tagOrId: str | _CanvasItemId) -> list[float]: ... @overload @@ -1716,12 +1714,12 @@ class Canvas(Widget, XView, YView): def itemcget(self, tagOrId, option): ... # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( - self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = ..., **kw: Any + self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, str, str]] | None: ... itemconfig = itemconfigure def move(self, *args) -> None: ... if sys.version_info >= (3, 8): - def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = ..., y: Literal[""] | float = ...) -> None: ... + def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... def postscript(self, cnf=..., **kw): ... # tkinter does: @@ -1736,7 +1734,7 @@ class Canvas(Widget, XView, YView): def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] def scale(self, *args) -> None: ... def scan_mark(self, x, y) -> None: ... - def scan_dragto(self, x, y, gain: int = ...) -> None: ... + def scan_dragto(self, x, y, gain: int = 10) -> None: ... def select_adjust(self, tagOrId, index) -> None: ... def select_clear(self) -> None: ... def select_from(self, tagOrId, index) -> None: ... 
@@ -1747,7 +1745,7 @@ class Canvas(Widget, XView, YView): class Checkbutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -1807,7 +1805,7 @@ class Checkbutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -1866,7 +1864,7 @@ _EntryIndex: TypeAlias = str | int # "INDICES" in manual page class Entry(Widget, XView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -1911,7 +1909,7 @@ class Entry(Widget, XView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -1954,7 +1952,7 @@ class Entry(Widget, XView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def delete(self, first: _EntryIndex, last: _EntryIndex | None = ...) -> None: ... + def delete(self, first: _EntryIndex, last: _EntryIndex | None = None) -> None: ... def get(self) -> str: ... def icursor(self, index: _EntryIndex) -> None: ... def index(self, index: _EntryIndex) -> int: ... @@ -1977,7 +1975,7 @@ class Entry(Widget, XView): class Frame(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -2004,7 +2002,7 @@ class Frame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -2029,7 +2027,7 @@ class Frame(Widget): class Label(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2068,7 +2066,7 @@ class Label(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2109,7 +2107,7 @@ class Label(Widget): class Listbox(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activestyle: Literal["dotbox", "none", "underline"] = ..., @@ -2160,7 +2158,7 @@ class Listbox(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activestyle: Literal["dotbox", "none", "underline"] = ..., background: _Color = ..., @@ -2198,8 +2196,8 @@ class Listbox(Widget, XView, YView): def activate(self, index: str | int) -> None: ... def bbox(self, index: str | int) -> tuple[int, int, int, int] | None: ... # type: ignore[override] def curselection(self): ... - def delete(self, first: str | int, last: str | int | None = ...) -> None: ... - def get(self, first: str | int, last: str | int | None = ...): ... + def delete(self, first: str | int, last: str | int | None = None) -> None: ... + def get(self, first: str | int, last: str | int | None = None): ... def index(self, index: str | int) -> int: ... def insert(self, index: str | int, *elements: str | float) -> None: ... def nearest(self, y): ... @@ -2208,21 +2206,21 @@ class Listbox(Widget, XView, YView): def see(self, index: str | int) -> None: ... def selection_anchor(self, index: str | int) -> None: ... 
select_anchor = selection_anchor - def selection_clear(self, first: str | int, last: str | int | None = ...) -> None: ... # type: ignore[override] + def selection_clear(self, first: str | int, last: str | int | None = None) -> None: ... # type: ignore[override] select_clear = selection_clear def selection_includes(self, index: str | int): ... select_includes = selection_includes - def selection_set(self, first: str | int, last: str | int | None = ...) -> None: ... + def selection_set(self, first: str | int, last: str | int | None = None) -> None: ... select_set = selection_set def size(self) -> int: ... # type: ignore[override] def itemcget(self, index: str | int, option): ... - def itemconfigure(self, index: str | int, cnf: Incomplete | None = ..., **kw): ... + def itemconfigure(self, index: str | int, cnf: Incomplete | None = None, **kw): ... itemconfig = itemconfigure class Menu(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2254,7 +2252,7 @@ class Menu(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeborderwidth: _ScreenUnits = ..., @@ -2281,7 +2279,7 @@ class Menu(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def tk_popup(self, x: int, y: int, entry: str | int = ...) -> None: ... + def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... def activate(self, index: str | int) -> None: ... def add(self, itemType, cnf=..., **kw): ... # docstring says "Internal function." def insert(self, index, itemType, cnf=..., **kw): ... # docstring says "Internal function." @@ -2475,10 +2473,10 @@ class Menu(Widget): variable: Variable = ..., ) -> None: ... def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... - def delete(self, index1: str | int, index2: str | int | None = ...) -> None: ... + def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... def entrycget(self, index: str | int, option: str) -> Any: ... def entryconfigure( - self, index: str | int, cnf: dict[str, Any] | None = ..., **kw: Any + self, index: str | int, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... entryconfig = entryconfigure def index(self, index: str | int) -> int | None: ... 
@@ -2492,7 +2490,7 @@ class Menu(Widget): class Menubutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2534,7 +2532,7 @@ class Menubutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2578,7 +2576,7 @@ class Menubutton(Widget): class Message(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, anchor: _Anchor = ..., @@ -2609,7 +2607,7 @@ class Message(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, anchor: _Anchor = ..., aspect: int = ..., @@ -2641,7 +2639,7 @@ class Message(Widget): class Radiobutton(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2690,7 +2688,7 @@ class Radiobutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activeforeground: _Color = ..., @@ -2745,7 +2743,7 @@ class Radiobutton(Widget): class Scale(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2788,7 +2786,7 @@ class Scale(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., background: _Color = ..., @@ -2830,13 +2828,13 @@ class Scale(Widget): config = configure def get(self) -> float: ... def set(self, value) -> None: ... - def coords(self, value: float | None = ...) -> tuple[int, int]: ... + def coords(self, value: float | None = None) -> tuple[int, int]: ... def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... class Scrollbar(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -2869,7 +2867,7 @@ class Scrollbar(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., activerelief: _Relief = ..., @@ -2896,7 +2894,7 @@ class Scrollbar(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index: Incomplete | None = ...): ... + def activate(self, index: Incomplete | None = None): ... def delta(self, deltax: int, deltay: int) -> float: ... def fraction(self, x: int, y: int) -> float: ... def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... 
@@ -2908,7 +2906,7 @@ _TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc class Text(Widget, XView, YView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, autoseparators: bool = ..., @@ -2965,7 +2963,7 @@ class Text(Widget, XView, YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, autoseparators: bool = ..., background: _Color = ..., @@ -3020,17 +3018,17 @@ class Text(Widget, XView, YView): def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ... def count(self, index1, index2, *args): ... # TODO @overload - def debug(self, boolean: None = ...) -> bool: ... + def debug(self, boolean: None = None) -> bool: ... @overload def debug(self, boolean: bool) -> None: ... - def delete(self, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... + def delete(self, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... def dlineinfo(self, index: _TextIndex) -> tuple[int, int, int, int, int] | None: ... @overload def dump( self, index1: _TextIndex, - index2: _TextIndex | None = ..., - command: None = ..., + index2: _TextIndex | None = None, + command: None = None, *, all: bool = ..., image: bool = ..., @@ -3057,7 +3055,7 @@ class Text(Widget, XView, YView): def dump( self, index1: _TextIndex, - index2: _TextIndex | None = ..., + index2: _TextIndex | None = None, *, command: Callable[[str, str, str], object] | str, all: bool = ..., @@ -3069,23 +3067,23 @@ class Text(Widget, XView, YView): ) -> None: ... def edit(self, *args): ... # docstring says "Internal method" @overload - def edit_modified(self, arg: None = ...) -> bool: ... # actually returns Literal[0, 1] + def edit_modified(self, arg: None = None) -> bool: ... # actually returns Literal[0, 1] @overload def edit_modified(self, arg: bool) -> None: ... # actually returns empty string def edit_redo(self) -> None: ... # actually returns empty string def edit_reset(self) -> None: ... # actually returns empty string def edit_separator(self) -> None: ... # actually returns empty string def edit_undo(self) -> None: ... # actually returns empty string - def get(self, index1: _TextIndex, index2: _TextIndex | None = ...) -> str: ... + def get(self, index1: _TextIndex, index2: _TextIndex | None = None) -> str: ... # TODO: image_* methods def image_cget(self, index, option): ... - def image_configure(self, index, cnf: Incomplete | None = ..., **kw): ... + def image_configure(self, index, cnf: Incomplete | None = None, **kw): ... def image_create(self, index, cnf=..., **kw): ... def image_names(self): ... def index(self, index: _TextIndex) -> str: ... def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... @overload - def mark_gravity(self, markName: str, direction: None = ...) -> Literal["left", "right"]: ... + def mark_gravity(self, markName: str, direction: None = None) -> Literal["left", "right"]: ... @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string def mark_names(self) -> tuple[str, ...]: ... 
@@ -3103,14 +3101,14 @@ class Text(Widget, XView, YView): self, pattern: str, index: _TextIndex, - stopindex: _TextIndex | None = ..., - forwards: bool | None = ..., - backwards: bool | None = ..., - exact: bool | None = ..., - regexp: bool | None = ..., - nocase: bool | None = ..., - count: Variable | None = ..., - elide: bool | None = ..., + stopindex: _TextIndex | None = None, + forwards: bool | None = None, + backwards: bool | None = None, + exact: bool | None = None, + regexp: bool | None = None, + nocase: bool | None = None, + count: Variable | None = None, + elide: bool | None = None, ) -> str: ... # returns empty string for not found def see(self, index: _TextIndex) -> None: ... def tag_add(self, tagName: str, index1: _TextIndex, *args: _TextIndex) -> None: ... @@ -3121,18 +3119,18 @@ class Text(Widget, XView, YView): tagName: str, sequence: str | None, func: Callable[[Event[Text]], object] | None, - add: Literal["", "+"] | bool | None = ..., + add: Literal["", "+"] | bool | None = None, ) -> str: ... @overload - def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... - def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = ...) -> None: ... + def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = None) -> None: ... # allowing any string for cget instead of just Literals because there's no other way to look up tag options def tag_cget(self, tagName: str, option: str): ... @overload def tag_configure( self, tagName: str, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bgstipple: _Bitmap = ..., @@ -3167,24 +3165,28 @@ class Text(Widget, XView, YView): def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure def tag_delete(self, __first_tag_name: str, *tagNames: str) -> None: ... # error if no tag names given - def tag_lower(self, tagName: str, belowThis: str | None = ...) -> None: ... - def tag_names(self, index: _TextIndex | None = ...) -> tuple[str, ...]: ... - def tag_nextrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... - def tag_prevrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... - def tag_raise(self, tagName: str, aboveThis: str | None = ...) -> None: ... + def tag_lower(self, tagName: str, belowThis: str | None = None) -> None: ... + def tag_names(self, index: _TextIndex | None = None) -> tuple[str, ...]: ... + def tag_nextrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange( + self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None + ) -> tuple[str, str] | tuple[()]: ... + def tag_raise(self, tagName: str, aboveThis: str | None = None) -> None: ... def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... # tag_remove and tag_delete are different - def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... + def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = None) -> None: ... # TODO: window_* methods def window_cget(self, index, option): ... - def window_configure(self, index, cnf: Incomplete | None = ..., **kw): ... 
+ def window_configure(self, index, cnf: Incomplete | None = None, **kw): ... window_config = window_configure def window_create(self, index, cnf=..., **kw) -> None: ... def window_names(self): ... def yview_pickplace(self, *what): ... # deprecated class _setit: - def __init__(self, var, value, callback: Incomplete | None = ...) -> None: ... + def __init__(self, var, value, callback: Incomplete | None = None) -> None: ... def __call__(self, *args) -> None: ... # manual page: tk_optionMenu @@ -3213,7 +3215,7 @@ class Image: name: Incomplete tk: _tkinter.TkappType def __init__( - self, imgtype, name: Incomplete | None = ..., cnf=..., master: Misc | _tkinter.TkappType | None = ..., **kw + self, imgtype, name: Incomplete | None = None, cnf=..., master: Misc | _tkinter.TkappType | None = None, **kw ) -> None: ... def __del__(self) -> None: ... def __setitem__(self, key, value) -> None: ... @@ -3227,9 +3229,9 @@ class Image: class PhotoImage(Image): def __init__( self, - name: str | None = ..., + name: str | None = None, cnf: dict[str, Any] = ..., - master: Misc | _tkinter.TkappType | None = ..., + master: Misc | _tkinter.TkappType | None = None, *, data: str | bytes = ..., # not same as data argument of put() format: str = ..., @@ -3255,8 +3257,8 @@ class PhotoImage(Image): def cget(self, option: str) -> str: ... def __getitem__(self, key: str) -> str: ... # always string: image['height'] can be '0' def copy(self) -> PhotoImage: ... - def zoom(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... - def subsample(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... + def zoom(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... + def subsample(self, x: int, y: int | Literal[""] = "") -> PhotoImage: ... def get(self, x: int, y: int) -> tuple[int, int, int]: ... def put( self, @@ -3269,9 +3271,9 @@ class PhotoImage(Image): | tuple[list[_Color], ...] | tuple[tuple[_Color, ...], ...] ), - to: tuple[int, int] | None = ..., + to: tuple[int, int] | None = None, ) -> None: ... - def write(self, filename: StrOrBytesPath, format: str | None = ..., from_coords: tuple[int, int] | None = ...) -> None: ... + def write(self, filename: StrOrBytesPath, format: str | None = None, from_coords: tuple[int, int] | None = None) -> None: ... if sys.version_info >= (3, 8): def transparency_get(self, x: int, y: int) -> bool: ... def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... @@ -3279,9 +3281,9 @@ class PhotoImage(Image): class BitmapImage(Image): def __init__( self, - name: Incomplete | None = ..., + name: Incomplete | None = None, cnf: dict[str, Any] = ..., - master: Misc | _tkinter.TkappType | None = ..., + master: Misc | _tkinter.TkappType | None = None, *, background: _Color = ..., data: str | bytes = ..., @@ -3297,7 +3299,7 @@ def image_types() -> tuple[str, ...]: ... class Spinbox(Widget, XView): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, activebackground: _Color = ..., @@ -3356,7 +3358,7 @@ class Spinbox(Widget, XView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, activebackground: _Color = ..., background: _Color = ..., @@ -3413,7 +3415,7 @@ class Spinbox(Widget, XView): def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] - def delete(self, first, last: Incomplete | None = ...) -> Literal[""]: ... 
+ def delete(self, first, last: Incomplete | None = None) -> Literal[""]: ... def get(self) -> str: ... def icursor(self, index): ... def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... @@ -3427,7 +3429,7 @@ class Spinbox(Widget, XView): def selection(self, *args) -> tuple[int, ...]: ... def selection_adjust(self, index): ... def selection_clear(self): ... - def selection_element(self, element: Incomplete | None = ...): ... + def selection_element(self, element: Incomplete | None = None): ... if sys.version_info >= (3, 8): def selection_from(self, index: int) -> None: ... def selection_present(self) -> None: ... @@ -3437,7 +3439,7 @@ class Spinbox(Widget, XView): class LabelFrame(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -3471,7 +3473,7 @@ class LabelFrame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -3502,7 +3504,7 @@ class LabelFrame(Widget): class PanedWindow(Widget): def __init__( self, - master: Misc | None = ..., + master: Misc | None = None, cnf: dict[str, Any] | None = ..., *, background: _Color = ..., @@ -3531,7 +3533,7 @@ class PanedWindow(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: _Color = ..., bd: _ScreenUnits = ..., @@ -3571,7 +3573,7 @@ class PanedWindow(Widget): def sash_mark(self, index): ... def sash_place(self, index, x, y): ... def panecget(self, child, option): ... - def paneconfigure(self, tagOrId, cnf: Incomplete | None = ..., **kw): ... + def paneconfigure(self, tagOrId, cnf: Incomplete | None = None, **kw): ... paneconfig: Incomplete def panes(self): ... diff --git a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi index ac2ea187bdd5..4300d94f58e8 100644 --- a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi @@ -1,4 +1,5 @@ import sys +from tkinter import Misc, _Color from tkinter.commondialog import Dialog from typing import ClassVar @@ -8,4 +9,12 @@ if sys.version_info >= (3, 9): class Chooser(Dialog): command: ClassVar[str] -def askcolor(color: str | bytes | None = ..., **options) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... +if sys.version_info >= (3, 9): + def askcolor( + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... + +else: + def askcolor( + color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + ) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... diff --git a/mypy/typeshed/stdlib/tkinter/commondialog.pyi b/mypy/typeshed/stdlib/tkinter/commondialog.pyi index 49101c7e6089..eba3ab5be3bd 100644 --- a/mypy/typeshed/stdlib/tkinter/commondialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/commondialog.pyi @@ -10,5 +10,5 @@ class Dialog: command: ClassVar[str | None] master: Incomplete | None options: Mapping[str, Incomplete] - def __init__(self, master: Incomplete | None = ..., **options) -> None: ... - def show(self, **options): ... + def __init__(self, master: Incomplete | None = None, **options: Incomplete) -> None: ... + def show(self, **options: Incomplete) -> Incomplete: ... 
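The tkinter hunks in this sync all follow the same pattern: stub defaults previously written as `...` are replaced with the actual runtime defaults. As a rough illustration only (this snippet is not part of the patch; the defaults named in the comments are the ones shown in the updated stubs above), spelling the defaults out lets a type checker describe what a bare call does:

```python
# Illustrative sketch, not part of the diff; assumes a display is available.
# With explicit defaults in the stubs (className="Tk", useTk=True, n=0),
# these zero-argument calls are fully described to the type checker.
import tkinter

root = tkinter.Tk()            # Tk(screenName=None, baseName=None, className="Tk", useTk=True)
root.after(200, root.destroy)  # schedule the window to close after 200 ms
root.mainloop()                # mainloop(n=0)
```
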
diff --git a/mypy/typeshed/stdlib/tkinter/dialog.pyi b/mypy/typeshed/stdlib/tkinter/dialog.pyi index ef7713f40994..8825188c767e 100644 --- a/mypy/typeshed/stdlib/tkinter/dialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -12,5 +12,5 @@ DIALOG_ICON: str class Dialog(Widget): widgetName: str num: int - def __init__(self, master: Incomplete | None = ..., cnf: Mapping[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: Incomplete | None = None, cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... def destroy(self) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/dnd.pyi b/mypy/typeshed/stdlib/tkinter/dnd.pyi index e2cfc43f606a..4a6ab42b3e33 100644 --- a/mypy/typeshed/stdlib/tkinter/dnd.pyi +++ b/mypy/typeshed/stdlib/tkinter/dnd.pyi @@ -11,9 +11,9 @@ class _DndSource(Protocol): class DndHandler: root: ClassVar[Tk | None] def __init__(self, source: _DndSource, event: Event[Misc]) -> None: ... - def cancel(self, event: Event[Misc] | None = ...) -> None: ... - def finish(self, event: Event[Misc] | None, commit: int = ...) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... + def finish(self, event: Event[Misc] | None, commit: int = 0) -> None: ... def on_motion(self, event: Event[Misc]) -> None: ... def on_release(self, event: Event[Misc]) -> None: ... -def dnd_start(source, event) -> DndHandler | None: ... +def dnd_start(source: _DndSource, event: Event[Misc]) -> DndHandler | None: ... diff --git a/mypy/typeshed/stdlib/tkinter/filedialog.pyi b/mypy/typeshed/stdlib/tkinter/filedialog.pyi index d0b7e451f72c..10b36e4d3c06 100644 --- a/mypy/typeshed/stdlib/tkinter/filedialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/filedialog.pyi @@ -41,21 +41,21 @@ class FileDialog: filter_button: Button cancel_button: Button def __init__( - self, master, title: Incomplete | None = ... + self, master, title: Incomplete | None = None ) -> None: ... # title is usually a str or None, but e.g. int doesn't raise en exception either how: Incomplete | None - def go(self, dir_or_file=..., pattern: str = ..., default: str = ..., key: Incomplete | None = ...): ... - def quit(self, how: Incomplete | None = ...) -> None: ... + def go(self, dir_or_file=".", pattern: str = "*", default: str = "", key: Incomplete | None = None): ... + def quit(self, how: Incomplete | None = None) -> None: ... def dirs_double_event(self, event) -> None: ... def dirs_select_event(self, event) -> None: ... def files_double_event(self, event) -> None: ... def files_select_event(self, event) -> None: ... def ok_event(self, event) -> None: ... def ok_command(self) -> None: ... - def filter_command(self, event: Incomplete | None = ...) -> None: ... + def filter_command(self, event: Incomplete | None = None) -> None: ... def get_filter(self): ... def get_selection(self): ... - def cancel_command(self, event: Incomplete | None = ...) -> None: ... + def cancel_command(self, event: Incomplete | None = None) -> None: ... def set_filter(self, dir, pat) -> None: ... def set_selection(self, file) -> None: ... @@ -116,7 +116,7 @@ def askdirectory( # TODO: If someone actually uses these, overload to have the actual return type of open(..., mode) def asksaveasfile( - mode: str = ..., + mode: str = "w", *, confirmoverwrite: bool | None = ..., defaultextension: str | None = ..., @@ -128,7 +128,7 @@ def asksaveasfile( typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: ... 
def askopenfile( - mode: str = ..., + mode: str = "r", *, defaultextension: str | None = ..., filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., @@ -139,7 +139,7 @@ def askopenfile( typevariable: StringVar | str | None = ..., ) -> IO[Incomplete] | None: ... def askopenfiles( - mode: str = ..., + mode: str = "r", *, defaultextension: str | None = ..., filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., diff --git a/mypy/typeshed/stdlib/tkinter/font.pyi b/mypy/typeshed/stdlib/tkinter/font.pyi index dff84e9fac78..0a557e921914 100644 --- a/mypy/typeshed/stdlib/tkinter/font.pyi +++ b/mypy/typeshed/stdlib/tkinter/font.pyi @@ -41,10 +41,10 @@ class Font: self, # In tkinter, 'root' refers to tkinter.Tk by convention, but the code # actually works with any tkinter widget so we use tkinter.Misc. - root: tkinter.Misc | None = ..., - font: _FontDescription | None = ..., - name: str | None = ..., - exists: bool = ..., + root: tkinter.Misc | None = None, + font: _FontDescription | None = None, + name: str | None = None, + exists: bool = False, *, family: str = ..., size: int = ..., @@ -68,19 +68,19 @@ class Font: def cget(self, option: str) -> Any: ... __getitem__ = cget @overload - def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = ...) -> str: ... + def actual(self, option: Literal["family"], displayof: tkinter.Misc | None = None) -> str: ... @overload - def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = ...) -> int: ... + def actual(self, option: Literal["size"], displayof: tkinter.Misc | None = None) -> int: ... @overload - def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = ...) -> Literal["normal", "bold"]: ... + def actual(self, option: Literal["weight"], displayof: tkinter.Misc | None = None) -> Literal["normal", "bold"]: ... @overload - def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = ...) -> Literal["roman", "italic"]: ... + def actual(self, option: Literal["slant"], displayof: tkinter.Misc | None = None) -> Literal["roman", "italic"]: ... @overload - def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = ...) -> bool: ... + def actual(self, option: Literal["underline", "overstrike"], displayof: tkinter.Misc | None = None) -> bool: ... @overload - def actual(self, option: None, displayof: tkinter.Misc | None = ...) -> _FontDict: ... + def actual(self, option: None, displayof: tkinter.Misc | None = None) -> _FontDict: ... @overload - def actual(self, *, displayof: tkinter.Misc | None = ...) -> _FontDict: ... + def actual(self, *, displayof: tkinter.Misc | None = None) -> _FontDict: ... def config( self, *, @@ -99,14 +99,14 @@ class Font: def metrics(self, __option: Literal["fixed"], *, displayof: tkinter.Misc | None = ...) -> bool: ... @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... - def measure(self, text: str, displayof: tkinter.Misc | None = ...) -> int: ... + def measure(self, text: str, displayof: tkinter.Misc | None = None) -> int: ... def __eq__(self, other: object) -> bool: ... -def families(root: tkinter.Misc | None = ..., displayof: tkinter.Misc | None = ...) -> tuple[str, ...]: ... -def names(root: tkinter.Misc | None = ...) -> tuple[str, ...]: ... +def families(root: tkinter.Misc | None = None, displayof: tkinter.Misc | None = None) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = None) -> tuple[str, ...]: ... 
if sys.version_info >= (3, 10): - def nametofont(name: str, root: tkinter.Misc | None = ...) -> Font: ... + def nametofont(name: str, root: tkinter.Misc | None = None) -> Font: ... else: def nametofont(name: str) -> Font: ... diff --git a/mypy/typeshed/stdlib/tkinter/messagebox.pyi b/mypy/typeshed/stdlib/tkinter/messagebox.pyi index d99c588e3cd3..5a04b66d7866 100644 --- a/mypy/typeshed/stdlib/tkinter/messagebox.pyi +++ b/mypy/typeshed/stdlib/tkinter/messagebox.pyi @@ -34,11 +34,11 @@ NO: str class Message(Dialog): command: ClassVar[str] -def showinfo(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def showwarning(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def showerror(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def askquestion(title: str | None = ..., message: str | None = ..., **options) -> str: ... -def askokcancel(title: str | None = ..., message: str | None = ..., **options) -> bool: ... -def askyesno(title: str | None = ..., message: str | None = ..., **options) -> bool: ... -def askyesnocancel(title: str | None = ..., message: str | None = ..., **options) -> bool | None: ... -def askretrycancel(title: str | None = ..., message: str | None = ..., **options) -> bool: ... +def showinfo(title: str | None = None, message: str | None = None, **options) -> str: ... +def showwarning(title: str | None = None, message: str | None = None, **options) -> str: ... +def showerror(title: str | None = None, message: str | None = None, **options) -> str: ... +def askquestion(title: str | None = None, message: str | None = None, **options) -> str: ... +def askokcancel(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesno(title: str | None = None, message: str | None = None, **options) -> bool: ... +def askyesnocancel(title: str | None = None, message: str | None = None, **options) -> bool | None: ... +def askretrycancel(title: str | None = None, message: str | None = None, **options) -> bool: ... diff --git a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi index 72f6ca8c0687..114f8c3de3ea 100644 --- a/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi +++ b/mypy/typeshed/stdlib/tkinter/scrolledtext.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete from tkinter import Frame, Misc, Scrollbar, Text __all__ = ["ScrolledText"] @@ -6,4 +7,4 @@ __all__ = ["ScrolledText"] class ScrolledText(Text): frame: Frame vbar: Scrollbar - def __init__(self, master: Misc | None = ..., **kwargs) -> None: ... + def __init__(self, master: Misc | None = None, **kwargs: Incomplete) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi index 8ae8b6d286d0..2c57cce7371c 100644 --- a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi @@ -1,11 +1,11 @@ from tkinter import Event, Frame, Misc, Toplevel class Dialog(Toplevel): - def __init__(self, parent: Misc | None, title: str | None = ...) -> None: ... + def __init__(self, parent: Misc | None, title: str | None = None) -> None: ... def body(self, master: Frame) -> Misc | None: ... def buttonbox(self) -> None: ... - def ok(self, event: Event[Misc] | None = ...) -> None: ... - def cancel(self, event: Event[Misc] | None = ...) -> None: ... + def ok(self, event: Event[Misc] | None = None) -> None: ... + def cancel(self, event: Event[Misc] | None = None) -> None: ... def validate(self) -> bool: ... 
def apply(self) -> None: ... @@ -13,12 +13,12 @@ class SimpleDialog: def __init__( self, master: Misc | None, - text: str = ..., + text: str = "", buttons: list[str] = ..., - default: int | None = ..., - cancel: int | None = ..., - title: str | None = ..., - class_: str | None = ..., + default: int | None = None, + cancel: int | None = None, + title: str | None = None, + class_: str | None = None, ) -> None: ... def go(self) -> int | None: ... def return_event(self, event: Event[Misc]) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/tix.pyi b/mypy/typeshed/stdlib/tkinter/tix.pyi index db568bc4abef..5dd6f040fab7 100644 --- a/mypy/typeshed/stdlib/tkinter/tix.pyi +++ b/mypy/typeshed/stdlib/tkinter/tix.pyi @@ -38,22 +38,22 @@ TCL_ALL_EVENTS: Literal[0] class tixCommand: def tix_addbitmapdir(self, directory: str) -> None: ... def tix_cget(self, option: str) -> Any: ... - def tix_configure(self, cnf: dict[str, Any] | None = ..., **kw: Any) -> Any: ... - def tix_filedialog(self, dlgclass: str | None = ...) -> str: ... + def tix_configure(self, cnf: dict[str, Any] | None = None, **kw: Any) -> Any: ... + def tix_filedialog(self, dlgclass: str | None = None) -> str: ... def tix_getbitmap(self, name: str) -> str: ... def tix_getimage(self, name: str) -> str: ... def tix_option_get(self, name: str) -> Any: ... - def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = ...) -> None: ... + def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = None) -> None: ... class Tk(tkinter.Tk, tixCommand): - def __init__(self, screenName: str | None = ..., baseName: str | None = ..., className: str = ...) -> None: ... + def __init__(self, screenName: str | None = None, baseName: str | None = None, className: str = "Tix") -> None: ... class TixWidget(tkinter.Widget): def __init__( self, - master: tkinter.Misc | None = ..., - widgetName: str | None = ..., - static_options: list[str] | None = ..., + master: tkinter.Misc | None = None, + widgetName: str | None = None, + static_options: list[str] | None = None, cnf: dict[str, Any] = ..., kw: dict[str, Any] = ..., ) -> None: ... @@ -62,52 +62,50 @@ class TixWidget(tkinter.Widget): def subwidget(self, name: str) -> tkinter.Widget: ... def subwidgets_all(self) -> list[tkinter.Widget]: ... def config_all(self, option: Any, value: Any) -> None: ... - def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = ..., **kw) -> None: ... + def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): - def __init__( - self, master: tkinter.Widget, name: str, destroy_physically: int = ..., check_intermediate: int = ... - ) -> None: ... + def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: - def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = ..., **kw) -> None: ... + def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... def delete(self) -> None: ... def config(self, cnf: dict[str, Any] = ..., **kw): ... class Balloon(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... 
+ def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = ..., **kw) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... def insert(self, index: int, str: str) -> None: ... def pick(self, index: int) -> None: ... class Control(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class LabelFrame(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class Meter(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class OptionMenu(TixWidget): def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... @@ -129,7 +127,7 @@ class Select(TixWidget): def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): @@ -164,13 +162,13 @@ class FileEntry(TixWidget): def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... - def add_child(self, parent: str | None = ..., cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def add_child(self, parent: str | None = None, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... def anchor_set(self, entry: str) -> None: ... def anchor_clear(self) -> None: ... # FIXME: Overload, certain combos return, others don't - def column_width(self, col: int = ..., width: int | None = ..., chars: int | None = ...) -> int | None: ... 
+ def column_width(self, col: int = 0, width: int | None = None, chars: int | None = None) -> int | None: ... def delete_all(self) -> None: ... def delete_entry(self, entry: str) -> None: ... def delete_offsprings(self, entry: str) -> None: ... @@ -195,7 +193,7 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def indicator_size(self, entry: str) -> int: ... def info_anchor(self) -> str: ... def info_bbox(self, entry: str) -> tuple[int, int, int, int]: ... - def info_children(self, entry: str | None = ...) -> tuple[str, ...]: ... + def info_children(self, entry: str | None = None) -> tuple[str, ...]: ... def info_data(self, entry: str) -> Any: ... def info_dragsite(self) -> str: ... def info_dropsite(self) -> str: ... @@ -216,34 +214,34 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def see(self, entry: str) -> None: ... def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def selection_includes(self, entry: str) -> bool: ... - def selection_set(self, first: str, last: str | None = ...) -> None: ... + def selection_set(self, first: str, last: str | None = None) -> None: ... def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... def open(self, entrypath: str) -> None: ... - def getselection(self, mode: str = ...) -> tuple[str, ...]: ... + def getselection(self, mode: str = "on") -> tuple[str, ...]: ... def getstatus(self, entrypath: str) -> str: ... - def setstatus(self, entrypath: str, mode: str = ...) -> None: ... + def setstatus(self, entrypath: str, mode: str = "on") -> None: ... class Tree(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... def open(self, entrypath: str) -> None: ... - def setmode(self, entrypath: str, mode: str = ...) -> None: ... + def setmode(self, entrypath: str, mode: str = "none") -> None: ... class TList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... def anchor_set(self, index: int) -> None: ... def anchor_clear(self) -> None: ... - def delete(self, from_: int, to: int | None = ...) -> None: ... + def delete(self, from_: int, to: int | None = None) -> None: ... def dragsite_set(self, index: int) -> None: ... def dragsite_clear(self) -> None: ... def dropsite_set(self, index: int) -> None: ... @@ -261,7 +259,7 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def see(self, index: int) -> None: ... def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def selection_includes(self, index: int) -> bool: ... - def selection_set(self, first: int, last: int | None = ...) -> None: ... + def selection_set(self, first: int, last: int | None = None) -> None: ... 
class PanedWindow(TixWidget): def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... @@ -280,7 +278,7 @@ class ListNoteBook(TixWidget): def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... def delete(self, name: str) -> None: ... def page(self, name: str) -> tkinter.Widget: ... @@ -289,7 +287,7 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... class InputOnly(TixWidget): - def __init__(self, master: tkinter.Widget | None = ..., cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... class Form: def __setitem__(self, key: str, value: Any) -> None: ... @@ -297,6 +295,6 @@ class Form: def form(self, cnf: dict[str, Any] = ..., **kw) -> None: ... def check(self) -> bool: ... def forget(self) -> None: ... - def grid(self, xsize: int = ..., ysize: int = ...) -> tuple[int, int] | None: ... - def info(self, option: str | None = ...): ... + def grid(self, xsize: int = 0, ysize: int = 0) -> tuple[int, int] | None: ... + def info(self, option: str | None = None): ... def slaves(self) -> list[tkinter.Widget]: ... diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index a191b3be281a..61ebc0e2734f 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -4,7 +4,7 @@ import tkinter from _typeshed import Incomplete from collections.abc import Callable from tkinter.font import _FontDescription -from typing import Any, Union, overload +from typing import Any, overload from typing_extensions import Literal, TypeAlias, TypedDict __all__ = [ @@ -36,15 +36,15 @@ __all__ = [ ] def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... -def setup_master(master: Incomplete | None = ...): ... +def setup_master(master: Incomplete | None = None): ... -_Padding: TypeAlias = Union[ - tkinter._ScreenUnits, - tuple[tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits], - tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits], -] +_Padding: TypeAlias = ( + tkinter._ScreenUnits + | tuple[tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] + | tuple[tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits, tkinter._ScreenUnits] +) # from ttk_widget (aka ttk::widget) manual page, differs from tkinter._Compound _TtkCompound: TypeAlias = Literal["text", "image", tkinter._Compound] @@ -52,32 +52,32 @@ _TtkCompound: TypeAlias = Literal["text", "image", tkinter._Compound] class Style: master: Incomplete tk: _tkinter.TkappType - def __init__(self, master: tkinter.Misc | None = ...) -> None: ... - def configure(self, style, query_opt: Incomplete | None = ..., **kw): ... - def map(self, style, query_opt: Incomplete | None = ..., **kw): ... - def lookup(self, style, option, state: Incomplete | None = ..., default: Incomplete | None = ...): ... - def layout(self, style, layoutspec: Incomplete | None = ...): ... + def __init__(self, master: tkinter.Misc | None = None) -> None: ... 
+ def configure(self, style, query_opt: Incomplete | None = None, **kw): ... + def map(self, style, query_opt: Incomplete | None = None, **kw): ... + def lookup(self, style, option, state: Incomplete | None = None, default: Incomplete | None = None): ... + def layout(self, style, layoutspec: Incomplete | None = None): ... def element_create(self, elementname, etype, *args, **kw) -> None: ... def element_names(self): ... def element_options(self, elementname): ... - def theme_create(self, themename, parent: Incomplete | None = ..., settings: Incomplete | None = ...) -> None: ... + def theme_create(self, themename, parent: Incomplete | None = None, settings: Incomplete | None = None) -> None: ... def theme_settings(self, themename, settings) -> None: ... def theme_names(self) -> tuple[str, ...]: ... @overload def theme_use(self, themename: str) -> None: ... @overload - def theme_use(self, themename: None = ...) -> str: ... + def theme_use(self, themename: None = None) -> str: ... class Widget(tkinter.Widget): - def __init__(self, master: tkinter.Misc | None, widgetname, kw: Incomplete | None = ...) -> None: ... + def __init__(self, master: tkinter.Misc | None, widgetname, kw: Incomplete | None = None) -> None: ... def identify(self, x: int, y: int) -> str: ... - def instate(self, statespec, callback: Incomplete | None = ..., *args, **kw): ... - def state(self, statespec: Incomplete | None = ...): ... + def instate(self, statespec, callback: Incomplete | None = None, *args, **kw): ... + def state(self, statespec: Incomplete | None = None): ... class Button(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -98,7 +98,7 @@ class Button(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -122,7 +122,7 @@ class Button(Widget): class Checkbutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -148,7 +148,7 @@ class Checkbutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -174,8 +174,8 @@ class Checkbutton(Widget): class Entry(Widget, tkinter.Entry): def __init__( self, - master: tkinter.Misc | None = ..., - widget: str | None = ..., + master: tkinter.Misc | None = None, + widget: str | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -199,7 +199,7 @@ class Entry(Widget, tkinter.Entry): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -224,7 +224,7 @@ class Entry(Widget, tkinter.Entry): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -252,7 +252,7 @@ class Entry(Widget, tkinter.Entry): class Combobox(Entry): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -279,7 +279,7 @@ class Combobox(Entry): @overload # type: ignore[override] def 
configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -307,7 +307,7 @@ class Combobox(Entry): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., cursor: tkinter._Cursor = ..., @@ -331,13 +331,13 @@ class Combobox(Entry): ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def current(self, newindex: int | None = ...) -> int: ... + def current(self, newindex: int | None = None) -> int: ... def set(self, value: Any) -> None: ... class Frame(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -354,7 +354,7 @@ class Frame(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -373,7 +373,7 @@ class Frame(Widget): class Label(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, anchor: tkinter._Anchor = ..., background: tkinter._Color = ..., @@ -401,7 +401,7 @@ class Label(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, anchor: tkinter._Anchor = ..., background: tkinter._Color = ..., @@ -431,7 +431,7 @@ class Label(Widget): class Labelframe(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., # undocumented @@ -452,7 +452,7 @@ class Labelframe(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., @@ -477,7 +477,7 @@ LabelFrame = Labelframe class Menubutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., compound: _TtkCompound = ..., @@ -498,7 +498,7 @@ class Menubutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, compound: _TtkCompound = ..., cursor: tkinter._Cursor = ..., @@ -521,7 +521,7 @@ class Menubutton(Widget): class Notebook(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -535,7 +535,7 @@ class Notebook(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -564,15 +564,15 @@ class Notebook(Widget): def identify(self, x: int, y: int) -> str: ... def index(self, tab_id): ... def insert(self, pos, child, **kw) -> None: ... - def select(self, tab_id: Incomplete | None = ...): ... - def tab(self, tab_id, option: Incomplete | None = ..., **kw): ... + def select(self, tab_id: Incomplete | None = None): ... + def tab(self, tab_id, option: Incomplete | None = None, **kw): ... def tabs(self): ... def enable_traversal(self) -> None: ... 
class Panedwindow(Widget, tkinter.PanedWindow): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -588,7 +588,7 @@ class Panedwindow(Widget, tkinter.PanedWindow): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -602,7 +602,7 @@ class Panedwindow(Widget, tkinter.PanedWindow): @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., height: int = ..., @@ -614,15 +614,15 @@ class Panedwindow(Widget, tkinter.PanedWindow): def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... forget: Incomplete def insert(self, pos, child, **kw) -> None: ... - def pane(self, pane, option: Incomplete | None = ..., **kw): ... - def sashpos(self, index, newpos: Incomplete | None = ...): ... + def pane(self, pane, option: Incomplete | None = None, **kw): ... + def sashpos(self, index, newpos: Incomplete | None = None): ... PanedWindow = Panedwindow class Progressbar(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -640,7 +640,7 @@ class Progressbar(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., length: tkinter._ScreenUnits = ..., @@ -656,14 +656,14 @@ class Progressbar(Widget): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def start(self, interval: Literal["idle"] | int | None = ...) -> None: ... - def step(self, amount: float | None = ...) -> None: ... + def start(self, interval: Literal["idle"] | int | None = None) -> None: ... + def step(self, amount: float | None = None) -> None: ... def stop(self) -> None: ... class Radiobutton(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: tkinter._ButtonCommand = ..., @@ -685,7 +685,7 @@ class Radiobutton(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: tkinter._ButtonCommand = ..., compound: _TtkCompound = ..., @@ -711,7 +711,7 @@ class Radiobutton(Widget): class Scale(Widget, tkinter.Scale): # type: ignore[misc] def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: str | Callable[[str], object] = ..., @@ -730,7 +730,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: str | Callable[[str], object] = ..., cursor: tkinter._Cursor = ..., @@ -750,7 +750,7 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: str | Callable[[str], object] = ..., cursor: tkinter._Cursor = ..., @@ -766,13 +766,13 @@ class Scale(Widget, tkinter.Scale): # type: ignore[misc] ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... - def get(self, x: int | None = ..., y: int | None = ...) 
-> float: ... + def get(self, x: int | None = None, y: int | None = None) -> float: ... # type ignore, because identify() methods of Widget and tkinter.Scale are incompatible class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., command: Callable[..., tuple[float, float] | None] | str = ..., @@ -785,7 +785,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., @@ -799,7 +799,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] @overload # type: ignore[override] def config( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., @@ -813,7 +813,7 @@ class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] class Separator(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -825,7 +825,7 @@ class Separator(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., orient: Literal["horizontal", "vertical"] = ..., @@ -839,7 +839,7 @@ class Separator(Widget): class Sizegrip(Widget): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., cursor: tkinter._Cursor = ..., @@ -850,7 +850,7 @@ class Sizegrip(Widget): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, cursor: tkinter._Cursor = ..., style: str = ..., @@ -863,7 +863,7 @@ class Sizegrip(Widget): class Spinbox(Entry): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, background: tkinter._Color = ..., # undocumented class_: str = ..., @@ -894,7 +894,7 @@ class Spinbox(Entry): @overload # type: ignore[override] def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, background: tkinter._Color = ..., command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., @@ -937,7 +937,7 @@ class _TreeviewTagDict(TypedDict): foreground: tkinter._Color background: tkinter._Color font: _FontDescription - image: Literal[""] | str # not wrapped in list :D + image: str # not wrapped in list :D class _TreeviewHeaderDict(TypedDict): text: str @@ -958,12 +958,12 @@ _TreeviewColumnId: TypeAlias = int | str # manual page: "COLUMN IDENTIFIERS" class Treeview(Widget, tkinter.XView, tkinter.YView): def __init__( self, - master: tkinter.Misc | None = ..., + master: tkinter.Misc | None = None, *, class_: str = ..., columns: str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] | Literal["#all"] = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] 
= ..., height: int = ..., name: str = ..., padding: _Padding = ..., @@ -981,11 +981,11 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def configure( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = None, *, columns: str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] | Literal["#all"] = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] = ..., height: int = ..., padding: _Padding = ..., selectmode: Literal["extended", "browse", "none"] = ..., @@ -998,8 +998,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item, column: _TreeviewColumnId | None = ...) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] - def get_children(self, item: str | None = ...) -> tuple[str, ...]: ... + def bbox(self, item, column: _TreeviewColumnId | None = None) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | None = None) -> tuple[str, ...]: ... def set_children(self, item: str, *newchildren: str) -> None: ... @overload def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ... @@ -1015,7 +1015,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def column( self, column: _TreeviewColumnId, - option: None = ..., + option: None = None, *, width: int = ..., minwidth: int = ..., @@ -1027,7 +1027,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def detach(self, *items: str) -> None: ... def exists(self, item: str) -> bool: ... @overload # type: ignore[override] - def focus(self, item: None = ...) -> str: ... # can return empty string + def focus(self, item: None = None) -> str: ... # can return empty string @overload def focus(self, item: str) -> Literal[""]: ... @overload @@ -1041,12 +1041,12 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def heading(self, column: _TreeviewColumnId, option: str) -> Any: ... @overload - def heading(self, column: _TreeviewColumnId, option: None = ...) -> _TreeviewHeaderDict: ... # type: ignore[misc] + def heading(self, column: _TreeviewColumnId, option: None = None) -> _TreeviewHeaderDict: ... # type: ignore[misc] @overload def heading( self, column: _TreeviewColumnId, - option: None = ..., + option: None = None, *, text: str = ..., image: tkinter._ImageSpec = ..., @@ -1063,7 +1063,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): self, parent: str, index: int | Literal["end"], - iid: str | None = ..., + iid: str | None = None, *, id: str = ..., # same as iid text: str = ..., @@ -1085,12 +1085,12 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def item(self, item: str, option: str) -> Any: ... @overload - def item(self, item: str, option: None = ...) -> _TreeviewItemDict: ... # type: ignore[misc] + def item(self, item: str, option: None = None) -> _TreeviewItemDict: ... # type: ignore[misc] @overload def item( self, item: str, - option: None = ..., + option: None = None, *, text: str = ..., image: tkinter._ImageSpec = ..., @@ -1107,23 +1107,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): if sys.version_info >= (3, 8): def selection(self) -> tuple[str, ...]: ... else: - def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = ...) -> tuple[str, ...]: ... 
+ def selection(self, selop: Incomplete | None = ..., items: Incomplete | None = None) -> tuple[str, ...]: ... def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ... def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ... @overload - def set(self, item: str, column: None = ..., value: None = ...) -> dict[str, Any]: ... + def set(self, item: str, column: None = None, value: None = None) -> dict[str, Any]: ... @overload - def set(self, item: str, column: _TreeviewColumnId, value: None = ...) -> Any: ... + def set(self, item: str, column: _TreeviewColumnId, value: None = None) -> Any: ... @overload def set(self, item: str, column: _TreeviewColumnId, value: Any) -> Literal[""]: ... # There's no tag_unbind() or 'add' argument for whatever reason. # Also, it's 'callback' instead of 'func' here. @overload def tag_bind( - self, tagname: str, sequence: str | None = ..., callback: Callable[[tkinter.Event[Treeview]], object] | None = ... + self, tagname: str, sequence: str | None = None, callback: Callable[[tkinter.Event[Treeview]], object] | None = None ) -> str: ... @overload def tag_bind(self, tagname: str, sequence: str | None, callback: str) -> None: ... @@ -1139,7 +1139,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def tag_configure( self, tagname: str, - option: None = ..., + option: None = None, *, # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug foreground: tkinter._Color = ..., @@ -1148,7 +1148,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._ImageSpec = ..., ) -> _TreeviewTagDict | Any: ... # can be None but annoying to check @overload - def tag_has(self, tagname: str, item: None = ...) -> tuple[str, ...]: ... + def tag_has(self, tagname: str, item: None = None) -> tuple[str, ...]: ... @overload def tag_has(self, tagname: str, item: str) -> bool: ... @@ -1158,10 +1158,10 @@ class LabeledScale(Frame): # TODO: don't any-type **kw. That goes to Frame.__init__. def __init__( self, - master: tkinter.Misc | None = ..., - variable: tkinter.IntVar | tkinter.DoubleVar | None = ..., - from_: float = ..., - to: float = ..., + master: tkinter.Misc | None = None, + variable: tkinter.IntVar | tkinter.DoubleVar | None = None, + from_: float = 0, + to: float = 10, *, compound: Literal["top", "bottom"] = ..., **kw, @@ -1174,7 +1174,7 @@ class OptionMenu(Menubutton): self, master, variable, - default: str | None = ..., + default: str | None = None, *values: str, # rest of these are keyword-only because *args syntax used above style: str = ..., @@ -1183,4 +1183,4 @@ class OptionMenu(Menubutton): ) -> None: ... # configure, config, cget, destroy are inherited from Menubutton # destroy and __setitem__ are overridden, signature does not change - def set_menu(self, default: Incomplete | None = ..., *values) -> None: ... + def set_menu(self, default: Incomplete | None = None, *values) -> None: ... 
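The Treeview annotations above lean on `option: None = None` overloads: passing no option returns the full typed dict, while a named option returns a single value. A rough usage sketch, again assuming a display and not part of the diff:

```python
# Illustrative sketch only (not part of the diff): exercising the Treeview.item()
# overloads annotated above.
import tkinter
from tkinter import ttk

root = tkinter.Tk()
tree = ttk.Treeview(root, columns=("size",), selectmode="browse")
tree.heading("size", text="Size")
iid = tree.insert("", "end", text="README.md", values=("1 KB",))
print(tree.item(iid))           # no option -> full item dict (_TreeviewItemDict)
print(tree.item(iid, "text"))   # named option -> just that value
root.destroy()
```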
diff --git a/mypy/typeshed/stdlib/tokenize.pyi b/mypy/typeshed/stdlib/tokenize.pyi index 1a67736e78de..ba57402fb845 100644 --- a/mypy/typeshed/stdlib/tokenize.pyi +++ b/mypy/typeshed/stdlib/tokenize.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import FileDescriptorOrPath from collections.abc import Callable, Generator, Iterable, Sequence from re import Pattern from token import * @@ -115,7 +115,6 @@ class Untokenizer: prev_row: int prev_col: int encoding: str | None - def __init__(self) -> None: ... def add_whitespace(self, start: _Position) -> None: ... def untokenize(self, iterable: Iterable[_Token]) -> str: ... def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ... @@ -123,10 +122,10 @@ class Untokenizer: # the docstring says "returns bytes" but is incorrect -- # if the ENCODING token is missing, it skips the encode def untokenize(iterable: Iterable[_Token]) -> Any: ... -def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, Sequence[bytes]]: ... -def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... +def detect_encoding(readline: Callable[[], bytes | bytearray]) -> tuple[str, Sequence[bytes]]: ... +def tokenize(readline: Callable[[], bytes | bytearray]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented -def open(filename: StrOrBytesPath | int) -> TextIO: ... +def open(filename: FileDescriptorOrPath) -> TextIO: ... def group(*choices: str) -> str: ... # undocumented def any(*choices: str) -> str: ... # undocumented def maybe(*choices: str) -> str: ... # undocumented diff --git a/mypy/typeshed/stdlib/trace.pyi b/mypy/typeshed/stdlib/trace.pyi index 1f0de1d4d964..f79b38f1ce82 100644 --- a/mypy/typeshed/stdlib/trace.pyi +++ b/mypy/typeshed/stdlib/trace.pyi @@ -14,35 +14,35 @@ _FileModuleFunction: TypeAlias = tuple[str, str | None, str] class CoverageResults: def __init__( self, - counts: dict[tuple[str, int], int] | None = ..., - calledfuncs: dict[_FileModuleFunction, int] | None = ..., - infile: StrPath | None = ..., - callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = ..., - outfile: StrPath | None = ..., + counts: dict[tuple[str, int], int] | None = None, + calledfuncs: dict[_FileModuleFunction, int] | None = None, + infile: StrPath | None = None, + callers: dict[tuple[_FileModuleFunction, _FileModuleFunction], int] | None = None, + outfile: StrPath | None = None, ) -> None: ... # undocumented def update(self, other: CoverageResults) -> None: ... - def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: StrPath | None = ...) -> None: ... + def write_results(self, show_missing: bool = True, summary: bool = False, coverdir: StrPath | None = None) -> None: ... def write_results_file( - self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = ... + self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = None ) -> tuple[int, int]: ... def is_ignored_filename(self, filename: str) -> bool: ... 
# undocumented class Trace: def __init__( self, - count: int = ..., - trace: int = ..., - countfuncs: int = ..., - countcallers: int = ..., + count: int = 1, + trace: int = 1, + countfuncs: int = 0, + countcallers: int = 0, ignoremods: Sequence[str] = ..., ignoredirs: Sequence[str] = ..., - infile: StrPath | None = ..., - outfile: StrPath | None = ..., - timing: bool = ..., + infile: StrPath | None = None, + outfile: StrPath | None = None, + timing: bool = False, ) -> None: ... def run(self, cmd: str | types.CodeType) -> None: ... def runctx( - self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = ..., locals: Mapping[str, Any] | None = ... + self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = None, locals: Mapping[str, Any] | None = None ) -> None: ... if sys.version_info >= (3, 9): def runfunc(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... diff --git a/mypy/typeshed/stdlib/traceback.pyi b/mypy/typeshed/stdlib/traceback.pyi index fcaa39bf42f7..4483a8c2a1b0 100644 --- a/mypy/typeshed/stdlib/traceback.pyi +++ b/mypy/typeshed/stdlib/traceback.pyi @@ -1,9 +1,9 @@ import sys -from _typeshed import Self, SupportsWrite +from _typeshed import SupportsWrite from collections.abc import Generator, Iterable, Iterator, Mapping from types import FrameType, TracebackType from typing import Any, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "extract_stack", @@ -29,7 +29,7 @@ __all__ = [ _PT: TypeAlias = tuple[str, int, str, str | None] -def print_tb(tb: TracebackType | None, limit: int | None = ..., file: SupportsWrite[str] | None = ...) -> None: ... +def print_tb(tb: TracebackType | None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): @overload @@ -37,51 +37,51 @@ if sys.version_info >= (3, 10): __exc: type[BaseException] | None, value: BaseException | None = ..., tb: TracebackType | None = ..., - limit: int | None = ..., - file: SupportsWrite[str] | None = ..., - chain: bool = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, ) -> None: ... @overload def print_exception( - __exc: BaseException, *, limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ... + __exc: BaseException, *, limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True ) -> None: ... @overload def format_exception( __exc: type[BaseException] | None, value: BaseException | None = ..., tb: TracebackType | None = ..., - limit: int | None = ..., - chain: bool = ..., + limit: int | None = None, + chain: bool = True, ) -> list[str]: ... @overload - def format_exception(__exc: BaseException, *, limit: int | None = ..., chain: bool = ...) -> list[str]: ... + def format_exception(__exc: BaseException, *, limit: int | None = None, chain: bool = True) -> list[str]: ... else: def print_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, - limit: int | None = ..., - file: SupportsWrite[str] | None = ..., - chain: bool = ..., + limit: int | None = None, + file: SupportsWrite[str] | None = None, + chain: bool = True, ) -> None: ... def format_exception( etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, - limit: int | None = ..., - chain: bool = ..., + limit: int | None = None, + chain: bool = True, ) -> list[str]: ... 
-def print_exc(limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... -def print_last(limit: int | None = ..., file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... -def print_stack(f: FrameType | None = ..., limit: int | None = ..., file: SupportsWrite[str] | None = ...) -> None: ... -def extract_tb(tb: TracebackType | None, limit: int | None = ...) -> StackSummary: ... -def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> StackSummary: ... +def print_exc(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_last(limit: int | None = None, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... +def print_stack(f: FrameType | None = None, limit: int | None = None, file: SupportsWrite[str] | None = None) -> None: ... +def extract_tb(tb: TracebackType | None, limit: int | None = None) -> StackSummary: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> StackSummary: ... def format_list(extracted_list: list[FrameSummary]) -> list[str]: ... # undocumented -def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = ...) -> None: ... +def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = None) -> None: ... if sys.version_info >= (3, 10): def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... @@ -89,18 +89,17 @@ if sys.version_info >= (3, 10): else: def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... -def format_exc(limit: int | None = ..., chain: bool = ...) -> str: ... -def format_tb(tb: TracebackType | None, limit: int | None = ...) -> list[str]: ... -def format_stack(f: FrameType | None = ..., limit: int | None = ...) -> list[str]: ... -def clear_frames(tb: TracebackType) -> None: ... +def format_exc(limit: int | None = None, chain: bool = True) -> str: ... +def format_tb(tb: TracebackType | None, limit: int | None = None) -> list[str]: ... +def format_stack(f: FrameType | None = None, limit: int | None = None) -> list[str]: ... +def clear_frames(tb: TracebackType | None) -> None: ... def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... if sys.version_info >= (3, 11): class _ExceptionPrintContext: - def __init__(self) -> None: ... def indent(self) -> str: ... - def emit(self, text_gen: str | Iterable[str], margin_char: str | None = ...) -> Generator[str, None, None]: ... + def emit(self, text_gen: str | Iterable[str], margin_char: str | None = None) -> Generator[str, None, None]: ... class TracebackException: __cause__: TracebackException @@ -120,17 +119,17 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - compact: bool = ..., - max_group_width: int = ..., - max_group_depth: int = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + max_group_width: int = 15, + max_group_depth: int = 10, + _seen: set[int] | None = None, ) -> None: ... 
@classmethod def from_exception( - cls: type[Self], + cls, exc: BaseException, *, limit: int | None = ..., @@ -147,15 +146,15 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - compact: bool = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + compact: bool = False, + _seen: set[int] | None = None, ) -> None: ... @classmethod def from_exception( - cls: type[Self], + cls, exc: BaseException, *, limit: int | None = ..., @@ -170,26 +169,26 @@ class TracebackException: exc_value: BaseException, exc_traceback: TracebackType | None, *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., - _seen: set[int] | None = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, + _seen: set[int] | None = None, ) -> None: ... @classmethod def from_exception( - cls: type[Self], exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... + cls, exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... ) -> Self: ... def __eq__(self, other: object) -> bool: ... if sys.version_info >= (3, 11): - def format(self, *, chain: bool = ..., _ctx: _ExceptionPrintContext | None = ...) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True, _ctx: _ExceptionPrintContext | None = None) -> Generator[str, None, None]: ... else: - def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... + def format(self, *, chain: bool = True) -> Generator[str, None, None]: ... def format_exception_only(self) -> Generator[str, None, None]: ... if sys.version_info >= (3, 11): - def print(self, *, file: SupportsWrite[str] | None = ..., chain: bool = ...) -> None: ... + def print(self, *, file: SupportsWrite[str] | None = None, chain: bool = True) -> None: ... class FrameSummary(Iterable[Any]): if sys.version_info >= (3, 11): @@ -199,12 +198,12 @@ class FrameSummary(Iterable[Any]): lineno: int | None, name: str, *, - lookup_line: bool = ..., - locals: Mapping[str, str] | None = ..., - line: str | None = ..., - end_lineno: int | None = ..., - colno: int | None = ..., - end_colno: int | None = ..., + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, + end_lineno: int | None = None, + colno: int | None = None, + end_colno: int | None = None, ) -> None: ... end_lineno: int | None colno: int | None @@ -216,9 +215,9 @@ class FrameSummary(Iterable[Any]): lineno: int | None, name: str, *, - lookup_line: bool = ..., - locals: Mapping[str, str] | None = ..., - line: str | None = ..., + lookup_line: bool = True, + locals: Mapping[str, str] | None = None, + line: str | None = None, ) -> None: ... filename: str lineno: int | None @@ -247,9 +246,9 @@ class StackSummary(list[FrameSummary]): cls, frame_gen: Iterable[tuple[FrameType, int]], *, - limit: int | None = ..., - lookup_lines: bool = ..., - capture_locals: bool = ..., + limit: int | None = None, + lookup_lines: bool = True, + capture_locals: bool = False, ) -> StackSummary: ... @classmethod def from_list(cls, a_list: Iterable[FrameSummary | _PT]) -> StackSummary: ... 
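The traceback stubs above now spell out the runtime defaults (`lookup_lines=True`, `capture_locals=False`, `chain=True`) and return `Self` from `from_exception()`. A small sketch of that API, not part of the diff:

```python
# Illustrative sketch only (not part of the diff): TracebackException with the
# defaults written out above.
import traceback

try:
    1 / 0
except ZeroDivisionError as exc:
    te = traceback.TracebackException.from_exception(exc, capture_locals=True)
    print("".join(te.format(chain=True)), end="")
```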
diff --git a/mypy/typeshed/stdlib/tracemalloc.pyi b/mypy/typeshed/stdlib/tracemalloc.pyi index ed952616600f..3dc8b8603fe5 100644 --- a/mypy/typeshed/stdlib/tracemalloc.pyi +++ b/mypy/typeshed/stdlib/tracemalloc.pyi @@ -1,7 +1,7 @@ import sys from _tracemalloc import * from collections.abc import Sequence -from typing import Any, Union, overload +from typing import Any, overload from typing_extensions import SupportsIndex, TypeAlias def get_object_traceback(obj: object) -> Traceback | None: ... @@ -23,7 +23,12 @@ class Filter(BaseFilter): def filename_pattern(self) -> str: ... all_frames: bool def __init__( - self, inclusive: bool, filename_pattern: str, lineno: int | None = ..., all_frames: bool = ..., domain: int | None = ... + self, + inclusive: bool, + filename_pattern: str, + lineno: int | None = None, + all_frames: bool = False, + domain: int | None = None, ) -> None: ... class Statistic: @@ -62,7 +67,7 @@ class Frame: def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... if sys.version_info >= (3, 9): - _TraceTuple: TypeAlias = Union[tuple[int, int, Sequence[_FrameTuple], int | None], tuple[int, int, Sequence[_FrameTuple]]] + _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple], int | None] | tuple[int, int, Sequence[_FrameTuple]] else: _TraceTuple: TypeAlias = tuple[int, int, Sequence[_FrameTuple]] @@ -80,11 +85,11 @@ class Traceback(Sequence[Frame]): if sys.version_info >= (3, 9): @property def total_nframe(self) -> int | None: ... - def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = ...) -> None: ... + def __init__(self, frames: Sequence[_FrameTuple], total_nframe: int | None = None) -> None: ... else: def __init__(self, frames: Sequence[_FrameTuple]) -> None: ... - def format(self, limit: int | None = ..., most_recent_first: bool = ...) -> list[str]: ... + def format(self, limit: int | None = None, most_recent_first: bool = False) -> list[str]: ... @overload def __getitem__(self, index: SupportsIndex) -> Frame: ... @overload @@ -104,11 +109,11 @@ class Traceback(Sequence[Frame]): class Snapshot: def __init__(self, traces: Sequence[_TraceTuple], traceback_limit: int) -> None: ... - def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = ...) -> list[StatisticDiff]: ... + def compare_to(self, old_snapshot: Snapshot, key_type: str, cumulative: bool = False) -> list[StatisticDiff]: ... def dump(self, filename: str) -> None: ... def filter_traces(self, filters: Sequence[DomainFilter | Filter]) -> Snapshot: ... @staticmethod def load(filename: str) -> Snapshot: ... - def statistics(self, key_type: str, cumulative: bool = ...) -> list[Statistic]: ... + def statistics(self, key_type: str, cumulative: bool = False) -> list[Statistic]: ... traceback_limit: int traces: Sequence[Trace] diff --git a/mypy/typeshed/stdlib/tty.pyi b/mypy/typeshed/stdlib/tty.pyi index 8edae9ec2deb..43f2e1cf9087 100644 --- a/mypy/typeshed/stdlib/tty.pyi +++ b/mypy/typeshed/stdlib/tty.pyi @@ -15,5 +15,5 @@ if sys.platform != "win32": ISPEED: int OSPEED: int CC: int - def setraw(fd: _FD, when: int = ...) -> None: ... - def setcbreak(fd: _FD, when: int = ...) -> None: ... + def setraw(fd: _FD, when: int = 2) -> None: ... + def setcbreak(fd: _FD, when: int = 2) -> None: ... 
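The tty stubs above write out `when: int = 2`, which matches `termios.TCSAFLUSH`. A POSIX-only sketch (requires an interactive terminal), not part of the diff:

```python
# Illustrative sketch only (not part of the diff): the when=2 default above is
# termios.TCSAFLUSH; restore the terminal settings afterwards.
import sys
import termios
import tty

fd = sys.stdin.fileno()
saved = termios.tcgetattr(fd)
try:
    tty.setcbreak(fd)  # same as tty.setcbreak(fd, termios.TCSAFLUSH)
    ch = sys.stdin.read(1)  # single keypress, no Enter required
finally:
    termios.tcsetattr(fd, termios.TCSADRAIN, saved)
print(f"read {ch!r}")
```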
diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 13197c336e5e..8017c8290fb9 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -1,8 +1,7 @@ -from _typeshed import Self from collections.abc import Callable, Sequence from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar -from typing import Any, ClassVar, Union, overload -from typing_extensions import TypeAlias +from typing import Any, ClassVar, overload +from typing_extensions import Self, TypeAlias __all__ = [ "ScrolledCanvas", @@ -133,7 +132,7 @@ __all__ = [ # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return # Any instead. -_Color: TypeAlias = Union[str, tuple[float, float, float]] +_Color: TypeAlias = str | tuple[float, float, float] _AnyColor: TypeAlias = Any # TODO: Replace this with a TypedDict once it becomes standardized. @@ -143,7 +142,7 @@ _Speed: TypeAlias = str | float _PolygonCoords: TypeAlias = Sequence[tuple[float, float]] class Vec2D(tuple[float, float]): - def __new__(cls: type[Self], x: float, y: float) -> Self: ... + def __new__(cls, x: float, y: float) -> Self: ... def __add__(self, other: tuple[float, float]) -> Vec2D: ... # type: ignore[override] @overload # type: ignore[override] def __mul__(self, other: Vec2D) -> float: ... @@ -161,11 +160,11 @@ class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] hscroll: Scrollbar vscroll: Scrollbar def __init__( - self, master: Misc | None, width: int = ..., height: int = ..., canvwidth: int = ..., canvheight: int = ... + self, master: Misc | None, width: int = 500, height: int = 350, canvwidth: int = 600, canvheight: int = 500 ) -> None: ... canvwidth: int canvheight: int - def reset(self, canvwidth: int | None = ..., canvheight: int | None = ..., bg: str | None = ...) -> None: ... + def reset(self, canvwidth: int | None = None, canvheight: int | None = None, bg: str | None = None) -> None: ... class TurtleScreenBase: cv: Canvas @@ -177,27 +176,27 @@ class TurtleScreenBase: def mainloop(self) -> None: ... def textinput(self, title: str, prompt: str) -> str | None: ... def numinput( - self, title: str, prompt: str, default: float | None = ..., minval: float | None = ..., maxval: float | None = ... + self, title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: ... class Terminator(Exception): ... class TurtleGraphicsError(Exception): ... class Shape: - def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = ...) -> None: ... - def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = ...) -> None: ... + def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = None) -> None: ... + def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = None) -> None: ... class TurtleScreen(TurtleScreenBase): - def __init__(self, cv: Canvas, mode: str = ..., colormode: float = ..., delay: int = ...) -> None: ... + def __init__(self, cv: Canvas, mode: str = "standard", colormode: float = 1.0, delay: int = 10) -> None: ... def clear(self) -> None: ... @overload - def mode(self, mode: None = ...) -> str: ... + def mode(self, mode: None = None) -> str: ... @overload def mode(self, mode: str) -> None: ... def setworldcoordinates(self, llx: float, lly: float, urx: float, ury: float) -> None: ... 
- def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = ...) -> None: ... + def register_shape(self, name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload - def colormode(self, cmode: None = ...) -> float: ... + def colormode(self, cmode: None = None) -> float: ... @overload def colormode(self, cmode: float) -> None: ... def reset(self) -> None: ... @@ -209,11 +208,11 @@ class TurtleScreen(TurtleScreenBase): @overload def bgcolor(self, r: float, g: float, b: float) -> None: ... @overload - def tracer(self, n: None = ...) -> int: ... + def tracer(self, n: None = None) -> int: ... @overload - def tracer(self, n: int, delay: int | None = ...) -> None: ... + def tracer(self, n: int, delay: int | None = None) -> None: ... @overload - def delay(self, delay: None = ...) -> int: ... + def delay(self, delay: None = None) -> int: ... @overload def delay(self, delay: int) -> None: ... def update(self) -> None: ... @@ -221,24 +220,24 @@ class TurtleScreen(TurtleScreenBase): def window_height(self) -> int: ... def getcanvas(self) -> Canvas: ... def getshapes(self) -> list[str]: ... - def onclick(self, fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def onkey(self, fun: Callable[[], object], key: str) -> None: ... - def listen(self, xdummy: float | None = ..., ydummy: float | None = ...) -> None: ... - def ontimer(self, fun: Callable[[], object], t: int = ...) -> None: ... + def listen(self, xdummy: float | None = None, ydummy: float | None = None) -> None: ... + def ontimer(self, fun: Callable[[], object], t: int = 0) -> None: ... @overload - def bgpic(self, picname: None = ...) -> str: ... + def bgpic(self, picname: None = None) -> str: ... @overload def bgpic(self, picname: str) -> None: ... @overload - def screensize(self, canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... + def screensize(self, canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload - def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... + def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape - def onkeypress(self, fun: Callable[[], object], key: str | None = ...) -> None: ... + def onkeypress(self, fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey class TNavigator: @@ -246,9 +245,9 @@ class TNavigator: DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int DEFAULT_ANGLEORIENT: int - def __init__(self, mode: str = ...) -> None: ... + def __init__(self, mode: str = "standard") -> None: ... def reset(self) -> None: ... - def degrees(self, fullcircle: float = ...) -> None: ... + def degrees(self, fullcircle: float = 360.0) -> None: ... def radians(self) -> None: ... def forward(self, distance: float) -> None: ... def back(self, distance: float) -> None: ... @@ -258,23 +257,23 @@ class TNavigator: def xcor(self) -> float: ... def ycor(self) -> float: ... @overload - def goto(self, x: tuple[float, float], y: None = ...) -> None: ... + def goto(self, x: tuple[float, float], y: None = None) -> None: ... @overload def goto(self, x: float, y: float) -> None: ... def home(self) -> None: ... 
def setx(self, x: float) -> None: ... def sety(self, y: float) -> None: ... @overload - def distance(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... + def distance(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(self, x: float, y: float) -> float: ... @overload - def towards(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... + def towards(self, x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(self, x: float, y: float) -> float: ... def heading(self) -> float: ... def setheading(self, to_angle: float) -> None: ... - def circle(self, radius: float, extent: float | None = ..., steps: int | None = ...) -> None: ... + def circle(self, radius: float, extent: float | None = None, steps: int | None = None) -> None: ... fd = forward bk = back backward = back @@ -286,20 +285,20 @@ class TNavigator: seth = setheading class TPen: - def __init__(self, resizemode: str = ...) -> None: ... + def __init__(self, resizemode: str = "noresize") -> None: ... @overload - def resizemode(self, rmode: None = ...) -> str: ... + def resizemode(self, rmode: None = None) -> str: ... @overload def resizemode(self, rmode: str) -> None: ... @overload - def pensize(self, width: None = ...) -> int: ... + def pensize(self, width: None = None) -> int: ... @overload def pensize(self, width: int) -> None: ... def penup(self) -> None: ... def pendown(self) -> None: ... def isdown(self) -> bool: ... @overload - def speed(self, speed: None = ...) -> int: ... + def speed(self, speed: None = None) -> int: ... @overload def speed(self, speed: _Speed) -> None: ... @overload @@ -331,7 +330,7 @@ class TPen: @overload def pen( self, - pen: _PenState | None = ..., + pen: _PenState | None = None, *, shown: bool = ..., pendown: bool = ..., @@ -356,15 +355,19 @@ class RawTurtle(TPen, TNavigator): screen: TurtleScreen screens: ClassVar[list[TurtleScreen]] def __init__( - self, canvas: Canvas | TurtleScreen | None = ..., shape: str = ..., undobuffersize: int = ..., visible: bool = ... + self, + canvas: Canvas | TurtleScreen | None = None, + shape: str = "classic", + undobuffersize: int = 1000, + visible: bool = True, ) -> None: ... def reset(self) -> None: ... def setundobuffer(self, size: int | None) -> None: ... def undobufferentries(self) -> int: ... def clear(self) -> None: ... - def clone(self: Self) -> Self: ... + def clone(self) -> Self: ... @overload - def shape(self, name: None = ...) -> str: ... + def shape(self, name: None = None) -> str: ... @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @@ -372,10 +375,10 @@ class RawTurtle(TPen, TNavigator): def shapesize(self) -> tuple[float, float, float]: ... # type: ignore[misc] @overload def shapesize( - self, stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ... + self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None ) -> None: ... @overload - def shearfactor(self, shear: None = ...) -> float: ... + def shearfactor(self, shear: None = None) -> float: ... @overload def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @@ -383,12 +386,12 @@ class RawTurtle(TPen, TNavigator): def shapetransform(self) -> tuple[float, float, float, float]: ... 
# type: ignore[misc] @overload def shapetransform( - self, t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... + self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... def get_shapepoly(self) -> _PolygonCoords | None: ... def settiltangle(self, angle: float) -> None: ... @overload - def tiltangle(self, angle: None = ...) -> float: ... + def tiltangle(self, angle: None = None) -> float: ... @overload def tiltangle(self, angle: float) -> None: ... def tilt(self, angle: float) -> None: ... @@ -397,21 +400,21 @@ class RawTurtle(TPen, TNavigator): # we return Any. def stamp(self) -> Any: ... def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... - def clearstamps(self, n: int | None = ...) -> None: ... + def clearstamps(self, n: int | None = None) -> None: ... def filling(self) -> bool: ... def begin_fill(self) -> None: ... def end_fill(self) -> None: ... - def dot(self, size: int | None = ..., *color: _Color) -> None: ... - def write(self, arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... + def dot(self, size: int | None = None, *color: _Color) -> None: ... + def write(self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... def begin_poly(self) -> None: ... def end_poly(self) -> None: ... def get_poly(self) -> _PolygonCoords | None: ... def getscreen(self) -> TurtleScreen: ... - def getturtle(self: Self) -> Self: ... + def getturtle(self) -> Self: ... getpen = getturtle - def onclick(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... - def onrelease(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... - def ondrag(self, fun: Callable[[float, float], object], btn: int = ..., add: bool | None = ...) -> None: ... + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def onrelease(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... + def ondrag(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: ... def undo(self) -> None: ... turtlesize = shapesize @@ -420,22 +423,22 @@ class _Screen(TurtleScreen): # Note int and float are interpreted differently, hence the Union instead of just float def setup( self, - width: int | float = ..., # noqa: Y041 - height: int | float = ..., # noqa: Y041 - startx: int | None = ..., - starty: int | None = ..., + width: int | float = 0.5, # noqa: Y041 + height: int | float = 0.75, # noqa: Y041 + startx: int | None = None, + starty: int | None = None, ) -> None: ... def title(self, titlestring: str) -> None: ... def bye(self) -> None: ... def exitonclick(self) -> None: ... class Turtle(RawTurtle): - def __init__(self, shape: str = ..., undobuffersize: int = ..., visible: bool = ...) -> None: ... + def __init__(self, shape: str = "classic", undobuffersize: int = 1000, visible: bool = True) -> None: ... RawPen = RawTurtle Pen = Turtle -def write_docstringdict(filename: str = ...) -> None: ... +def write_docstringdict(filename: str = "turtle_docstringdict") -> None: ... # Note: it's somewhat unfortunate that we have to copy the function signatures. # It would be nice if we could partially reduce the redundancy by doing something @@ -453,20 +456,20 @@ def write_docstringdict(filename: str = ...) -> None: ... 
def mainloop() -> None: ... def textinput(title: str, prompt: str) -> str | None: ... def numinput( - title: str, prompt: str, default: float | None = ..., minval: float | None = ..., maxval: float | None = ... + title: str, prompt: str, default: float | None = None, minval: float | None = None, maxval: float | None = None ) -> float | None: ... # Functions copied from TurtleScreen: def clear() -> None: ... @overload -def mode(mode: None = ...) -> str: ... +def mode(mode: None = None) -> str: ... @overload def mode(mode: str) -> None: ... def setworldcoordinates(llx: float, lly: float, urx: float, ury: float) -> None: ... -def register_shape(name: str, shape: _PolygonCoords | Shape | None = ...) -> None: ... +def register_shape(name: str, shape: _PolygonCoords | Shape | None = None) -> None: ... @overload -def colormode(cmode: None = ...) -> float: ... +def colormode(cmode: None = None) -> float: ... @overload def colormode(cmode: float) -> None: ... def reset() -> None: ... @@ -478,11 +481,11 @@ def bgcolor(color: _Color) -> None: ... @overload def bgcolor(r: float, g: float, b: float) -> None: ... @overload -def tracer(n: None = ...) -> int: ... +def tracer(n: None = None) -> int: ... @overload -def tracer(n: int, delay: int | None = ...) -> None: ... +def tracer(n: int, delay: int | None = None) -> None: ... @overload -def delay(delay: None = ...) -> int: ... +def delay(delay: None = None) -> int: ... @overload def delay(delay: int) -> None: ... def update() -> None: ... @@ -490,31 +493,31 @@ def window_width() -> int: ... def window_height() -> int: ... def getcanvas() -> Canvas: ... def getshapes() -> list[str]: ... -def onclick(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def onkey(fun: Callable[[], object], key: str) -> None: ... -def listen(xdummy: float | None = ..., ydummy: float | None = ...) -> None: ... -def ontimer(fun: Callable[[], object], t: int = ...) -> None: ... +def listen(xdummy: float | None = None, ydummy: float | None = None) -> None: ... +def ontimer(fun: Callable[[], object], t: int = 0) -> None: ... @overload -def bgpic(picname: None = ...) -> str: ... +def bgpic(picname: None = None) -> str: ... @overload def bgpic(picname: str) -> None: ... @overload -def screensize(canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... +def screensize(canvwidth: None = None, canvheight: None = None, bg: None = None) -> tuple[int, int]: ... @overload -def screensize(canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... +def screensize(canvwidth: int, canvheight: int, bg: _Color | None = None) -> None: ... onscreenclick = onclick resetscreen = reset clearscreen = clear addshape = register_shape -def onkeypress(fun: Callable[[], object], key: str | None = ...) -> None: ... +def onkeypress(fun: Callable[[], object], key: str | None = None) -> None: ... onkeyrelease = onkey # Functions copied from _Screen: -def setup(width: float = ..., height: float = ..., startx: int | None = ..., starty: int | None = ...) -> None: ... +def setup(width: float = 0.5, height: float = 0.75, startx: int | None = None, starty: int | None = None) -> None: ... def title(titlestring: str) -> None: ... def bye() -> None: ... def exitonclick() -> None: ... @@ -522,7 +525,7 @@ def Screen() -> _Screen: ... # Functions copied from TNavigator: -def degrees(fullcircle: float = ...) -> None: ... 
+def degrees(fullcircle: float = 360.0) -> None: ... def radians() -> None: ... def forward(distance: float) -> None: ... def back(distance: float) -> None: ... @@ -532,23 +535,23 @@ def pos() -> Vec2D: ... def xcor() -> float: ... def ycor() -> float: ... @overload -def goto(x: tuple[float, float], y: None = ...) -> None: ... +def goto(x: tuple[float, float], y: None = None) -> None: ... @overload def goto(x: float, y: float) -> None: ... def home() -> None: ... def setx(x: float) -> None: ... def sety(y: float) -> None: ... @overload -def distance(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... +def distance(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def distance(x: float, y: float) -> float: ... @overload -def towards(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... +def towards(x: TNavigator | tuple[float, float], y: None = None) -> float: ... @overload def towards(x: float, y: float) -> float: ... def heading() -> float: ... def setheading(to_angle: float) -> None: ... -def circle(radius: float, extent: float | None = ..., steps: int | None = ...) -> None: ... +def circle(radius: float, extent: float | None = None, steps: int | None = None) -> None: ... fd = forward bk = back @@ -562,18 +565,18 @@ seth = setheading # Functions copied from TPen: @overload -def resizemode(rmode: None = ...) -> str: ... +def resizemode(rmode: None = None) -> str: ... @overload def resizemode(rmode: str) -> None: ... @overload -def pensize(width: None = ...) -> int: ... +def pensize(width: None = None) -> int: ... @overload def pensize(width: int) -> None: ... def penup() -> None: ... def pendown() -> None: ... def isdown() -> bool: ... @overload -def speed(speed: None = ...) -> int: ... +def speed(speed: None = None) -> int: ... @overload def speed(speed: _Speed) -> None: ... @overload @@ -605,7 +608,7 @@ def isvisible() -> bool: ... def pen() -> _PenState: ... # type: ignore[misc] @overload def pen( - pen: _PenState | None = ..., + pen: _PenState | None = None, *, shown: bool = ..., pendown: bool = ..., @@ -632,7 +635,7 @@ ht = hideturtle def setundobuffer(size: int | None) -> None: ... def undobufferentries() -> int: ... @overload -def shape(name: None = ...) -> str: ... +def shape(name: None = None) -> str: ... @overload def shape(name: str) -> None: ... @@ -640,9 +643,9 @@ def shape(name: str) -> None: ... @overload def shapesize() -> tuple[float, float, float]: ... # type: ignore[misc] @overload -def shapesize(stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ...) -> None: ... +def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ... @overload -def shearfactor(shear: None = ...) -> float: ... +def shearfactor(shear: None = None) -> float: ... @overload def shearfactor(shear: float) -> None: ... @@ -651,12 +654,12 @@ def shearfactor(shear: float) -> None: ... def shapetransform() -> tuple[float, float, float, float]: ... # type: ignore[misc] @overload def shapetransform( - t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... + t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None ) -> None: ... def get_shapepoly() -> _PolygonCoords | None: ... def settiltangle(angle: float) -> None: ... @overload -def tiltangle(angle: None = ...) -> float: ... +def tiltangle(angle: None = None) -> float: ... 
@overload def tiltangle(angle: float) -> None: ... def tilt(angle: float) -> None: ... @@ -666,12 +669,12 @@ def tilt(angle: float) -> None: ... # we return Any. def stamp() -> Any: ... def clearstamp(stampid: int | tuple[int, ...]) -> None: ... -def clearstamps(n: int | None = ...) -> None: ... +def clearstamps(n: int | None = None) -> None: ... def filling() -> bool: ... def begin_fill() -> None: ... def end_fill() -> None: ... -def dot(size: int | None = ..., *color: _Color) -> None: ... -def write(arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... +def dot(size: int | None = None, *color: _Color) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... def begin_poly() -> None: ... def end_poly() -> None: ... def get_poly() -> _PolygonCoords | None: ... @@ -680,8 +683,8 @@ def getturtle() -> Turtle: ... getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... -def ondrag(fun: Callable[[float, float], object], btn: int = ..., add: Any | None = ...) -> None: ... +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: ... def undo() -> None: ... turtlesize = shapesize diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 28fce697f2ca..d529b3d9ad1a 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -16,7 +16,7 @@ from collections.abc import ( from importlib.machinery import ModuleSpec # pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping -from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y027 +from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y022 from typing_extensions import Literal, ParamSpec, final __all__ = [ @@ -68,6 +68,9 @@ _V_co = TypeVar("_V_co", covariant=True) @final class _Cell: + if sys.version_info >= (3, 8): + def __init__(self, __contents: object = ...) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] cell_contents: Any @@ -238,13 +241,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., @@ -261,13 +264,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] 
= ..., @@ -282,13 +285,13 @@ class CodeType: def replace( self, *, - co_argcount: int = ..., - co_posonlyargcount: int = ..., - co_kwonlyargcount: int = ..., - co_nlocals: int = ..., - co_stacksize: int = ..., - co_flags: int = ..., - co_firstlineno: int = ..., + co_argcount: int = -1, + co_posonlyargcount: int = -1, + co_kwonlyargcount: int = -1, + co_nlocals: int = -1, + co_stacksize: int = -1, + co_flags: int = -1, + co_firstlineno: int = -1, co_code: bytes = ..., co_consts: tuple[object, ...] = ..., co_names: tuple[str, ...] = ..., @@ -304,9 +307,10 @@ class CodeType: class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): __hash__: ClassVar[None] # type: ignore[assignment] def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ... - def __getitem__(self, __k: _KT) -> _VT_co: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... def copy(self) -> dict[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... @@ -344,12 +348,6 @@ class ModuleType: @final class GeneratorType(Generator[_T_co, _T_contra, _V_co]): - @property - def gi_code(self) -> CodeType: ... - @property - def gi_frame(self) -> FrameType: ... - @property - def gi_running(self) -> bool: ... @property def gi_yieldfrom(self) -> GeneratorType[_T_co, _T_contra, Any] | None: ... if sys.version_info >= (3, 11): @@ -359,25 +357,18 @@ class GeneratorType(Generator[_T_co, _T_contra, _V_co]): __qualname__: str def __iter__(self) -> GeneratorType[_T_co, _T_contra, _V_co]: ... def __next__(self) -> _T_co: ... - def close(self) -> None: ... def send(self, __arg: _T_contra) -> _T_co: ... @overload def throw( self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... @final class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): @property def ag_await(self) -> Awaitable[Any] | None: ... - @property - def ag_frame(self) -> FrameType: ... - @property - def ag_running(self) -> bool: ... - @property - def ag_code(self) -> CodeType: ... __name__: str __qualname__: str def __aiter__(self) -> AsyncGeneratorType[_T_co, _T_contra]: ... @@ -388,7 +379,7 @@ class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - async def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + async def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... def aclose(self) -> Coroutine[Any, Any, None]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @@ -398,14 +389,6 @@ class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): __name__: str __qualname__: str @property - def cr_await(self) -> Any | None: ... - @property - def cr_code(self) -> CodeType: ... - @property - def cr_frame(self) -> FrameType: ... - @property - def cr_running(self) -> bool: ... - @property def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ... 
if sys.version_info >= (3, 11): @property @@ -419,7 +402,7 @@ class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = ...) -> _T_co: ... class _StaticFunctionType: # Fictional type to correct the type of MethodType.__func__. @@ -431,7 +414,7 @@ class _StaticFunctionType: # By wrapping FunctionType in _StaticFunctionType, we get the right result; # similar to wrapping a function in staticmethod() at runtime to prevent it # being bound as a method. - def __get__(self, obj: object | None, type: type | None) -> FunctionType: ... + def __get__(self, obj: object, type: type | None) -> FunctionType: ... @final class MethodType: @@ -572,12 +555,12 @@ class MemberDescriptorType: def new_class( name: str, bases: Iterable[object] = ..., - kwds: dict[str, Any] | None = ..., - exec_body: Callable[[dict[str, Any]], object] | None = ..., + kwds: dict[str, Any] | None = None, + exec_body: Callable[[dict[str, Any]], object] | None = None, ) -> type: ... def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... def prepare_class( - name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = ... + name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = None ) -> tuple[type, dict[str, Any], dict[str, Any]]: ... # Actually a different type, but `property` is special and we want that too. @@ -590,7 +573,7 @@ _P = ParamSpec("_P") # it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable # The type: ignore is due to overlapping overloads, not the use of ParamSpec @overload -def coroutine(func: Callable[_P, Generator[_R, Any, Any]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] @overload def coroutine(func: _Fn) -> _Fn: ... @@ -606,13 +589,15 @@ if sys.version_info >= (3, 9): @property def __parameters__(self) -> tuple[Any, ...]: ... def __init__(self, origin: type, args: Any) -> None: ... + def __getitem__(self, __typeargs: Any) -> GenericAlias: ... if sys.version_info >= (3, 11): @property def __unpacked__(self) -> bool: ... @property def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... - def __getattr__(self, name: str) -> Any: ... # incomplete + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... 
if sys.version_info >= (3, 10): @final diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index a186bb92bf00..d06b081d3ddc 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -1,6 +1,7 @@ -import _typeshed import collections # Needed by aliases like DefaultDict, see mypy issue 2986 import sys +import typing_extensions +from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Incomplete, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from contextlib import AbstractAsyncContextManager, AbstractContextManager @@ -17,7 +18,7 @@ from types import ( TracebackType, WrapperDescriptorType, ) -from typing_extensions import ParamSpec as _ParamSpec, final as _final +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, final as _final __all__ = [ "AbstractSet", @@ -136,7 +137,7 @@ class TypeVar: __covariant__: bool __contravariant__: bool def __init__( - self, name: str, *constraints: Any, bound: Any | None = ..., covariant: bool = ..., contravariant: bool = ... + self, name: str, *constraints: Any, bound: Any | None = None, covariant: bool = False, contravariant: bool = False ) -> None: ... if sys.version_info >= (3, 10): def __or__(self, right: Any) -> _SpecialForm: ... @@ -214,7 +215,9 @@ if sys.version_info >= (3, 10): __bound__: Any | None __covariant__: bool __contravariant__: bool - def __init__(self, name: str, *, bound: Any | None = ..., contravariant: bool = ..., covariant: bool = ...) -> None: ... + def __init__( + self, name: str, *, bound: Any | None = None, contravariant: bool = False, covariant: bool = False + ) -> None: ... @property def args(self) -> ParamSpecArgs: ... @property @@ -359,11 +362,11 @@ class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): @overload @abstractmethod def throw( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> _T_co: ... @overload @abstractmethod - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... def close(self) -> None: ... def __iter__(self) -> Generator[_T_co, _T_contra, _V_co]: ... @property @@ -396,11 +399,11 @@ class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): @overload @abstractmethod def throw( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> _T_co: ... @overload @abstractmethod - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> _T_co: ... @abstractmethod def close(self) -> None: ... @@ -429,11 +432,11 @@ class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @overload @abstractmethod def athrow( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: Type[BaseException], __val: BaseException | object = None, __tb: TracebackType | None = None ) -> Awaitable[_T_co]: ... 
@overload @abstractmethod - def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Awaitable[_T_co]: ... + def athrow(self, __typ: BaseException, __val: None = None, __tb: TracebackType | None = None) -> Awaitable[_T_co]: ... def aclose(self) -> Awaitable[None]: ... @property def ag_await(self) -> Any: ... @@ -446,6 +449,7 @@ class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @runtime_checkable class Container(Protocol[_T_co]): + # This is generic more on vibes than anything else @abstractmethod def __contains__(self, __x: object) -> bool: ... @@ -463,7 +467,7 @@ class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @abstractmethod def __getitem__(self, index: slice) -> Sequence[_T_co]: ... # Mixin methods - def index(self, value: Any, start: int = ..., stop: int = ...) -> int: ... + def index(self, value: Any, start: int = 0, stop: int = ...) -> int: ... def count(self, value: Any) -> int: ... def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... @@ -495,9 +499,9 @@ class MutableSequence(Sequence[_T], Generic[_T]): def clear(self) -> None: ... def extend(self, values: Iterable[_T]) -> None: ... def reverse(self) -> None: ... - def pop(self, index: int = ...) -> _T: ... + def pop(self, index: int = -1) -> _T: ... def remove(self, value: _T) -> None: ... - def __iadd__(self: _typeshed.Self, values: Iterable[_T]) -> _typeshed.Self: ... + def __iadd__(self, values: Iterable[_T]) -> typing_extensions.Self: ... class AbstractSet(Collection[_T_co], Generic[_T_co]): @abstractmethod @@ -523,10 +527,10 @@ class MutableSet(AbstractSet[_T], Generic[_T]): def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, value: _T) -> None: ... - def __ior__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] - def __iand__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... - def __ixor__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] - def __isub__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... + def __ior__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __iand__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... + def __ixor__(self, it: AbstractSet[_T]) -> typing_extensions.Self: ... # type: ignore[override,misc] + def __isub__(self, it: AbstractSet[Any]) -> typing_extensions.Self: ... class MappingView(Sized): def __init__(self, mapping: Mapping[Any, Any]) -> None: ... # undocumented @@ -564,7 +568,7 @@ class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... -class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): +class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]): def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented def __contains__(self, value: object) -> bool: ... def __iter__(self) -> Iterator[_VT_co]: ... @@ -575,7 +579,7 @@ class Mapping(Collection[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. @abstractmethod - def __getitem__(self, __k: _KT) -> _VT_co: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... # Mixin methods @overload def get(self, __key: _KT) -> _VT_co | None: ... 
@@ -588,9 +592,9 @@ class Mapping(Collection[_KT], Generic[_KT, _VT_co]): class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod - def __setitem__(self, __k: _KT, __v: _VT) -> None: ... + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... @abstractmethod - def __delitem__(self, __v: _KT) -> None: ... + def __delitem__(self, __key: _KT) -> None: ... def clear(self) -> None: ... @overload def pop(self, __key: _KT) -> _VT: ... @@ -598,9 +602,13 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... def popitem(self) -> tuple[_KT, _VT]: ... # This overload should be allowed only if the value type is compatible with None. - # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + # + # Keep the following methods in line with MutableMapping.setdefault, modulo positional-only differences: + # -- collections.OrderedDict.setdefault + # -- collections.ChainMap.setdefault + # -- weakref.WeakKeyDictionary.setdefault @overload - def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT) -> _T | None: ... + def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT, __default: None = None) -> _T | None: ... @overload def setdefault(self, __key: _KT, __default: _VT) -> _VT: ... # 'update' used to take a Union, but using overloading is better. @@ -619,6 +627,8 @@ class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): # -- os._Environ.__ior__ # -- collections.UserDict.__ior__ # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ # -- weakref.WeakValueDictionary.__ior__ # -- weakref.WeakKeyDictionary.__ior__ @overload @@ -636,7 +646,9 @@ TYPE_CHECKING: bool # This differs from runtime, but better reflects the fact that in reality # classes deriving from IO use different names for the arguments. class IO(Iterator[AnyStr], Generic[AnyStr]): - # TODO use abstract properties + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 @property def mode(self) -> str: ... @property @@ -652,21 +664,21 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): @abstractmethod def isatty(self) -> bool: ... @abstractmethod - def read(self, __n: int = ...) -> AnyStr: ... + def read(self, __n: int = -1) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod - def readline(self, __limit: int = ...) -> AnyStr: ... + def readline(self, __limit: int = -1) -> AnyStr: ... @abstractmethod - def readlines(self, __hint: int = ...) -> list[AnyStr]: ... + def readlines(self, __hint: int = -1) -> list[AnyStr]: ... @abstractmethod - def seek(self, __offset: int, __whence: int = ...) -> int: ... + def seek(self, __offset: int, __whence: int = 0) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod - def truncate(self, __size: int | None = ...) -> int: ... + def truncate(self, __size: int | None = None) -> int: ... @abstractmethod def writable(self) -> bool: ... @abstractmethod @@ -689,7 +701,7 @@ class BinaryIO(IO[bytes]): def __enter__(self) -> BinaryIO: ... class TextIO(IO[str]): - # TODO use abstractproperty + # See comment regarding the @properties in the `IO` class @property def buffer(self) -> BinaryIO: ... @property @@ -707,7 +719,7 @@ class ByteString(Sequence[int], metaclass=ABCMeta): ... 
# Functions -_get_type_hints_obj_allowed_types = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed +_get_type_hints_obj_allowed_types: typing_extensions.TypeAlias = ( # noqa: Y042 object | Callable[..., Any] | FunctionType @@ -722,14 +734,14 @@ _get_type_hints_obj_allowed_types = ( # noqa: Y026 # TODO: Use TypeAlias once if sys.version_info >= (3, 9): def get_type_hints( obj: _get_type_hints_obj_allowed_types, - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - include_extras: bool = ..., + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, ) -> dict[str, Any]: ... else: def get_type_hints( - obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = ..., localns: dict[str, Any] | None = ... + obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = None, localns: dict[str, Any] | None = None ) -> dict[str, Any]: ... if sys.version_info >= (3, 8): @@ -751,9 +763,9 @@ if sys.version_info >= (3, 11): def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... def dataclass_transform( *, - eq_default: bool = ..., - order_default: bool = ..., - kw_only_default: bool = ..., + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: Any, ) -> IdentityFunction: ... @@ -771,7 +783,7 @@ class NamedTuple(tuple[Any, ...]): @overload def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... @overload - def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... @classmethod def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... if sys.version_info >= (3, 8): @@ -779,7 +791,7 @@ class NamedTuple(tuple[Any, ...]): else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... - def _replace(self: _typeshed.Self, **kwargs: Any) -> _typeshed.Self: ... + def _replace(self, **kwargs: Any) -> typing_extensions.Self: ... # Internal mypy fallback type for all typed dicts (does not exist at runtime) # N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict @@ -789,20 +801,20 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): if sys.version_info >= (3, 9): __required_keys__: ClassVar[frozenset[str]] __optional_keys__: ClassVar[frozenset[str]] - def copy(self: _typeshed.Self) -> _typeshed.Self: ... - # Using NoReturn so that only calls using mypy plugin hook that specialize the signature + def copy(self) -> typing_extensions.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. - def setdefault(self, k: NoReturn, default: object) -> object: ... + def setdefault(self, k: _Never, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. - def pop(self, k: NoReturn, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] def update(self: _T, __m: _T) -> None: ... - def __delitem__(self, k: NoReturn) -> None: ... - def items(self) -> ItemsView[str, object]: ... - def keys(self) -> KeysView[str]: ... - def values(self) -> ValuesView[object]: ... + def __delitem__(self, k: _Never) -> None: ... 
+ def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... if sys.version_info >= (3, 9): - def __or__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... - def __ior__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + def __or__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... + def __ior__(self, __value: typing_extensions.Self) -> typing_extensions.Self: ... @_final class ForwardRef: @@ -815,11 +827,17 @@ class ForwardRef: __forward_module__: Any | None if sys.version_info >= (3, 9): # The module and is_class arguments were added in later Python 3.9 versions. - def __init__(self, arg: str, is_argument: bool = ..., module: Any | None = ..., *, is_class: bool = ...) -> None: ... + def __init__(self, arg: str, is_argument: bool = True, module: Any | None = None, *, is_class: bool = False) -> None: ... else: - def __init__(self, arg: str, is_argument: bool = ...) -> None: ... + def __init__(self, arg: str, is_argument: bool = True) -> None: ... + + if sys.version_info >= (3, 9): + def _evaluate( + self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, recursive_guard: frozenset[str] + ) -> Any | None: ... + else: + def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... - def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... def __eq__(self, other: object) -> bool: ... if sys.version_info >= (3, 11): def __or__(self, other: Any) -> _SpecialForm: ... @@ -827,3 +845,5 @@ class ForwardRef: if sys.version_info >= (3, 10): def is_typeddict(tp: object) -> bool: ... + +def _type_repr(obj: object) -> str: ... 
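A recurring change in the typing.pyi diff above (and throughout this sync) is the move from the old `self: _typeshed.Self` annotation style to `typing_extensions.Self`. A hedged sketch of what that pattern looks like in user code; the `Builder` classes are invented for illustration and assume the `typing_extensions` package is available (on Python 3.11+ `Self` also lives in `typing`):

```python
from typing_extensions import Self


class Builder:
    def __init__(self) -> None:
        self.parts: list[str] = []

    def add(self, part: str) -> Self:
        # Returning Self (rather than "Builder") preserves the subclass type
        # across chained calls, which is what the rewritten stub methods such
        # as MutableSequence.__iadd__ and NamedTuple._replace rely on.
        self.parts.append(part)
        return self


class LoudBuilder(Builder):
    def shout(self) -> str:
        return " ".join(self.parts).upper()


# The chained calls below type-check as LoudBuilder, not plain Builder.
print(LoudBuilder().add("hello").add("world").shout())
```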
diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index edc0d228e7a1..bf3892d5709e 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -1,12 +1,13 @@ -import _typeshed import abc import collections import sys -from _typeshed import IdentityFunction +import typing +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Incomplete from collections.abc import Iterable -from typing import ( # noqa: Y022,Y027,Y039 +from typing import ( # noqa: Y022,Y039 TYPE_CHECKING as TYPE_CHECKING, - Any, + Any as Any, AsyncContextManager as AsyncContextManager, AsyncGenerator as AsyncGenerator, AsyncIterable as AsyncIterable, @@ -20,22 +21,19 @@ from typing import ( # noqa: Y022,Y027,Y039 Counter as Counter, DefaultDict as DefaultDict, Deque as Deque, - ItemsView, - KeysView, Mapping, NewType as NewType, NoReturn as NoReturn, Sequence, Text as Text, Type as Type, - TypeVar, - ValuesView, _Alias, overload as overload, type_check_only, ) __all__ = [ + "Any", "ClassVar", "Concatenate", "Final", @@ -45,6 +43,7 @@ __all__ = [ "ParamSpecKwargs", "Self", "Type", + "TypeVar", "TypeVarTuple", "Unpack", "Awaitable", @@ -72,6 +71,7 @@ __all__ = [ "Literal", "NewType", "overload", + "override", "Protocol", "reveal_type", "runtime", @@ -91,9 +91,9 @@ __all__ = [ "get_type_hints", ] -_T = TypeVar("_T") -_F = TypeVar("_F", bound=Callable[..., Any]) -_TC = TypeVar("_TC", bound=Type[object]) +_T = typing.TypeVar("_T") +_F = typing.TypeVar("_F", bound=Callable[..., Any]) +_TC = typing.TypeVar("_TC", bound=Type[object]) # unfortunately we have to duplicate this class definition from typing.pyi or we break pytype class _SpecialForm: @@ -128,20 +128,20 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): __required_keys__: ClassVar[frozenset[str]] __optional_keys__: ClassVar[frozenset[str]] __total__: ClassVar[bool] - def copy(self: _typeshed.Self) -> _typeshed.Self: ... - # Using NoReturn so that only calls using mypy plugin hook that specialize the signature + def copy(self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. - def setdefault(self, k: NoReturn, default: object) -> object: ... + def setdefault(self, k: Never, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. - def pop(self, k: NoReturn, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] def update(self: _T, __m: _T) -> None: ... - def items(self) -> ItemsView[str, object]: ... - def keys(self) -> KeysView[str]: ... - def values(self) -> ValuesView[object]: ... - def __delitem__(self, k: NoReturn) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... if sys.version_info >= (3, 9): - def __or__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... - def __ior__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + def __or__(self, __value: Self) -> Self: ... + def __ior__(self, __value: Self) -> Self: ... # TypedDict is a (non-subscriptable) special form. 
TypedDict: object @@ -150,9 +150,9 @@ OrderedDict = _Alias() def get_type_hints( obj: Callable[..., Any], - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - include_extras: bool = ..., + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, ) -> dict[str, Any]: ... def get_args(tp: Any) -> tuple[Any, ...]: ... def get_origin(tp: Any) -> Any | None: ... @@ -169,7 +169,6 @@ class SupportsIndex(Protocol, metaclass=abc.ABCMeta): if sys.version_info >= (3, 10): from typing import ( Concatenate as Concatenate, - ParamSpec as ParamSpec, ParamSpecArgs as ParamSpecArgs, ParamSpecKwargs as ParamSpecKwargs, TypeAlias as TypeAlias, @@ -185,18 +184,6 @@ else: __origin__: ParamSpec def __init__(self, origin: ParamSpec) -> None: ... - class ParamSpec: - __name__: str - __bound__: type[Any] | None - __covariant__: bool - __contravariant__: bool - def __init__( - self, name: str, *, bound: None | type[Any] | str = ..., contravariant: bool = ..., covariant: bool = ... - ) -> None: ... - @property - def args(self) -> ParamSpecArgs: ... - @property - def kwargs(self) -> ParamSpecKwargs: ... Concatenate: _SpecialForm TypeAlias: _SpecialForm TypeGuard: _SpecialForm @@ -212,7 +199,6 @@ if sys.version_info >= (3, 11): NotRequired as NotRequired, Required as Required, Self as Self, - TypeVarTuple as TypeVarTuple, Unpack as Unpack, assert_never as assert_never, assert_type as assert_type, @@ -223,9 +209,9 @@ if sys.version_info >= (3, 11): ) else: Self: _SpecialForm - Never: _SpecialForm + Never: _SpecialForm = ... def reveal_type(__obj: _T) -> _T: ... - def assert_never(__arg: NoReturn) -> NoReturn: ... + def assert_never(__arg: Never) -> Never: ... def assert_type(__val: _T, __typ: Any) -> _T: ... def clear_overloads() -> None: ... def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... @@ -235,17 +221,11 @@ else: LiteralString: _SpecialForm Unpack: _SpecialForm - @final - class TypeVarTuple: - __name__: str - def __init__(self, name: str) -> None: ... - def __iter__(self) -> Any: ... # Unpack[Self] - def dataclass_transform( *, - eq_default: bool = ..., - order_default: bool = ..., - kw_only_default: bool = ..., + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., **kwargs: object, ) -> IdentityFunction: ... @@ -261,12 +241,70 @@ else: @overload def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... @overload - def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... @classmethod - def _make(cls: type[_typeshed.Self], iterable: Iterable[Any]) -> _typeshed.Self: ... + def _make(cls, iterable: Iterable[Any]) -> Self: ... if sys.version_info >= (3, 8): def _asdict(self) -> dict[str, Any]: ... else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... - def _replace(self: _typeshed.Self, **kwargs: Any) -> _typeshed.Self: ... + def _replace(self, **kwargs: Any) -> Self: ... + +# New things in 3.xx +# The `default` parameter was added to TypeVar, ParamSpec, and TypeVarTuple (PEP 696) +# The `infer_variance` parameter was added to TypeVar (PEP 695) +# typing_extensions.override (PEP 698) +@final +class TypeVar: + __name__: str + __bound__: Any | None + __constraints__: tuple[Any, ...] 
+ __covariant__: bool + __contravariant__: bool + __default__: Any | None + def __init__( + self, + name: str, + *constraints: Any, + bound: Any | None = None, + covariant: bool = False, + contravariant: bool = False, + default: Any | None = None, + infer_variance: bool = False, + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + +@final +class ParamSpec: + __name__: str + __bound__: type[Any] | None + __covariant__: bool + __contravariant__: bool + __default__: type[Any] | None + def __init__( + self, + name: str, + *, + bound: None | type[Any] | str = None, + contravariant: bool = False, + covariant: bool = False, + default: type[Any] | str | None = None, + ) -> None: ... + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + +@final +class TypeVarTuple: + __name__: str + __default__: Any | None + def __init__(self, name: str, *, default: Any | None = None) -> None: ... + def __iter__(self) -> Any: ... # Unpack[Self] + +def override(__arg: _F) -> _F: ... diff --git a/mypy/typeshed/stdlib/unicodedata.pyi b/mypy/typeshed/stdlib/unicodedata.pyi index 7337ab8789b2..5a1f7fe6638d 100644 --- a/mypy/typeshed/stdlib/unicodedata.pyi +++ b/mypy/typeshed/stdlib/unicodedata.pyi @@ -1,6 +1,7 @@ import sys -from typing import Any, TypeVar -from typing_extensions import final +from _typeshed import ReadOnlyBuffer +from typing import Any, TypeVar, overload +from typing_extensions import Literal, TypeAlias, final ucd_3_2_0: UCD unidata_version: str @@ -13,36 +14,63 @@ _T = TypeVar("_T") def bidirectional(__chr: str) -> str: ... def category(__chr: str) -> str: ... def combining(__chr: str) -> int: ... -def decimal(__chr: str, __default: _T = ...) -> int | _T: ... +@overload +def decimal(__chr: str) -> int: ... +@overload +def decimal(__chr: str, __default: _T) -> int | _T: ... def decomposition(__chr: str) -> str: ... -def digit(__chr: str, __default: _T = ...) -> int | _T: ... -def east_asian_width(__chr: str) -> str: ... +@overload +def digit(__chr: str) -> int: ... +@overload +def digit(__chr: str, __default: _T) -> int | _T: ... + +_EastAsianWidth: TypeAlias = Literal["F", "H", "W", "Na", "A", "N"] + +def east_asian_width(__chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(__form: str, __unistr: str) -> bool: ... -def lookup(__name: str | bytes) -> str: ... +def lookup(__name: str | ReadOnlyBuffer) -> str: ... def mirrored(__chr: str) -> int: ... -def name(__chr: str, __default: _T = ...) -> str | _T: ... +@overload +def name(__chr: str) -> str: ... +@overload +def name(__chr: str, __default: _T) -> str | _T: ... def normalize(__form: str, __unistr: str) -> str: ... -def numeric(__chr: str, __default: _T = ...) -> float | _T: ... +@overload +def numeric(__chr: str) -> float: ... +@overload +def numeric(__chr: str, __default: _T) -> float | _T: ... @final class UCD: # The methods below are constructed from the same array in C - # (unicodedata_functions) and hence identical to the methods above. + # (unicodedata_functions) and hence identical to the functions above. unidata_version: str def bidirectional(self, __chr: str) -> str: ... def category(self, __chr: str) -> str: ... def combining(self, __chr: str) -> int: ... - def decimal(self, __chr: str, __default: _T = ...) -> int | _T: ... 
+ @overload + def decimal(self, __chr: str) -> int: ... + @overload + def decimal(self, __chr: str, __default: _T) -> int | _T: ... def decomposition(self, __chr: str) -> str: ... - def digit(self, __chr: str, __default: _T = ...) -> int | _T: ... - def east_asian_width(self, __chr: str) -> str: ... + @overload + def digit(self, __chr: str) -> int: ... + @overload + def digit(self, __chr: str, __default: _T) -> int | _T: ... + def east_asian_width(self, __chr: str) -> _EastAsianWidth: ... if sys.version_info >= (3, 8): def is_normalized(self, __form: str, __unistr: str) -> bool: ... - def lookup(self, __name: str | bytes) -> str: ... + def lookup(self, __name: str | ReadOnlyBuffer) -> str: ... def mirrored(self, __chr: str) -> int: ... - def name(self, __chr: str, __default: _T = ...) -> str | _T: ... + @overload + def name(self, __chr: str) -> str: ... + @overload + def name(self, __chr: str, __default: _T) -> str | _T: ... def normalize(self, __form: str, __unistr: str) -> str: ... - def numeric(self, __chr: str, __default: _T = ...) -> float | _T: ... + @overload + def numeric(self, __chr: str) -> float: ... + @overload + def numeric(self, __chr: str, __default: _T) -> float | _T: ... diff --git a/mypy/typeshed/stdlib/unittest/case.pyi b/mypy/typeshed/stdlib/unittest/case.pyi index 7db217077f1b..8f8cf43385a8 100644 --- a/mypy/typeshed/stdlib/unittest/case.pyi +++ b/mypy/typeshed/stdlib/unittest/case.pyi @@ -1,26 +1,13 @@ import logging import sys import unittest.result -from _typeshed import Self, SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub +from _typeshed import SupportsDunderGE, SupportsDunderGT, SupportsDunderLE, SupportsDunderLT, SupportsRSub, SupportsSub from collections.abc import Callable, Container, Iterable, Mapping, Sequence, Set as AbstractSet from contextlib import AbstractContextManager from re import Pattern from types import TracebackType -from typing import ( - Any, - AnyStr, - ClassVar, - Generic, - NamedTuple, - NoReturn, - Protocol, - SupportsAbs, - SupportsRound, - TypeVar, - Union, - overload, -) -from typing_extensions import ParamSpec, TypeAlias +from typing import Any, AnyStr, ClassVar, Generic, NamedTuple, NoReturn, Protocol, SupportsAbs, SupportsRound, TypeVar, overload +from typing_extensions import ParamSpec, Self, TypeAlias from warnings import WarningMessage if sys.version_info >= (3, 9): @@ -65,7 +52,7 @@ else: ) -> bool | None: ... if sys.version_info >= (3, 8): - def addModuleCleanup(__function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def addModuleCleanup(__function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... def doModuleCleanups() -> None: ... if sys.version_info >= (3, 11): @@ -82,9 +69,9 @@ class SkipTest(Exception): class _SupportsAbsAndDunderGE(SupportsDunderGE[Any], SupportsAbs[Any], Protocol): ... if sys.version_info >= (3, 10): - _IsInstanceClassInfo: TypeAlias = Union[type, UnionType, tuple[type | UnionType | tuple[Any, ...], ...]] + _IsInstanceClassInfo: TypeAlias = type | UnionType | tuple[type | UnionType | tuple[Any, ...], ...] else: - _IsInstanceClassInfo: TypeAlias = Union[type, tuple[type | tuple[Any, ...], ...]] + _IsInstanceClassInfo: TypeAlias = type | tuple[type | tuple[Any, ...], ...] class TestCase: failureException: type[BaseException] @@ -94,7 +81,7 @@ class TestCase: _testMethodName: str # undocumented _testMethodDoc: str - def __init__(self, methodName: str = ...) -> None: ... 
+ def __init__(self, methodName: str = "runTest") -> None: ... def __eq__(self, other: object) -> bool: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @@ -102,42 +89,42 @@ class TestCase: def setUpClass(cls) -> None: ... @classmethod def tearDownClass(cls) -> None: ... - def run(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... + def run(self, result: unittest.result.TestResult | None = None) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... - def skipTest(self, reason: Any) -> None: ... + def skipTest(self, reason: Any) -> NoReturn: ... def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... def debug(self) -> None: ... if sys.version_info < (3, 11): def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... - def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertNotEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertTrue(self, expr: Any, msg: Any = ...) -> None: ... - def assertFalse(self, expr: Any, msg: Any = ...) -> None: ... - def assertIs(self, expr1: object, expr2: object, msg: Any = ...) -> None: ... - def assertIsNot(self, expr1: object, expr2: object, msg: Any = ...) -> None: ... - def assertIsNone(self, obj: object, msg: Any = ...) -> None: ... - def assertIsNotNone(self, obj: object, msg: Any = ...) -> None: ... - def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... - def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... - def assertIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = ...) -> None: ... - def assertNotIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = ...) -> None: ... + def assertEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertNotEqual(self, first: Any, second: Any, msg: Any = None) -> None: ... + def assertTrue(self, expr: Any, msg: Any = None) -> None: ... + def assertFalse(self, expr: Any, msg: Any = None) -> None: ... + def assertIs(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNot(self, expr1: object, expr2: object, msg: Any = None) -> None: ... + def assertIsNone(self, obj: object, msg: Any = None) -> None: ... + def assertIsNotNone(self, obj: object, msg: Any = None) -> None: ... + def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = None) -> None: ... + def assertIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = None) -> None: ... + def assertNotIsInstance(self, obj: object, cls: _IsInstanceClassInfo, msg: Any = None) -> None: ... @overload - def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = ...) -> None: ... + def assertGreater(self, a: SupportsDunderGT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = ...) -> None: ... + def assertGreater(self, a: _T, b: SupportsDunderLT[_T], msg: Any = None) -> None: ... @overload - def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = ...) -> None: ... + def assertGreaterEqual(self, a: SupportsDunderGE[_T], b: _T, msg: Any = None) -> None: ... 
@overload - def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = ...) -> None: ... + def assertGreaterEqual(self, a: _T, b: SupportsDunderLE[_T], msg: Any = None) -> None: ... @overload - def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = ...) -> None: ... + def assertLess(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = ...) -> None: ... + def assertLess(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = ...) -> None: ... + def assertLessEqual(self, a: SupportsDunderLT[_T], b: _T, msg: Any = None) -> None: ... @overload - def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = ...) -> None: ... + def assertLessEqual(self, a: _T, b: SupportsDunderGT[_T], msg: Any = None) -> None: ... # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` # are not using `ParamSpec` intentionally, # because they might be used with explicitly wrong arg types to raise some error in tests. @@ -150,126 +137,124 @@ class TestCase: **kwargs: Any, ) -> None: ... @overload - def assertRaises(self, expected_exception: type[_E] | tuple[type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... + def assertRaises( + self, expected_exception: type[_E] | tuple[type[_E], ...], *, msg: Any = ... + ) -> _AssertRaisesContext[_E]: ... @overload def assertRaisesRegex( # type: ignore[misc] self, expected_exception: type[BaseException] | tuple[type[BaseException], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - callable: Callable[..., object], + expected_regex: str | Pattern[str], + callable: Callable[..., Any], *args: Any, **kwargs: Any, ) -> None: ... @overload def assertRaisesRegex( - self, - expected_exception: type[_E] | tuple[type[_E], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - msg: Any = ..., + self, expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertRaisesContext[_E]: ... @overload def assertWarns( # type: ignore[misc] self, expected_warning: type[Warning] | tuple[type[Warning], ...], - callable: Callable[_P, object], + callable: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs, ) -> None: ... @overload - def assertWarns(self, expected_warning: type[Warning] | tuple[type[Warning], ...], msg: Any = ...) -> _AssertWarnsContext: ... + def assertWarns( + self, expected_warning: type[Warning] | tuple[type[Warning], ...], *, msg: Any = ... + ) -> _AssertWarnsContext: ... @overload def assertWarnsRegex( # type: ignore[misc] self, expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - callable: Callable[_P, object], + expected_regex: str | Pattern[str], + callable: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs, ) -> None: ... @overload def assertWarnsRegex( - self, - expected_warning: type[Warning] | tuple[type[Warning], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - msg: Any = ..., + self, expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | Pattern[str], *, msg: Any = ... ) -> _AssertWarnsContext: ... def assertLogs( - self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[_LoggingWatcher]: ... 
if sys.version_info >= (3, 10): def assertNoLogs( - self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + self, logger: str | logging.Logger | None = None, level: int | str | None = None ) -> _AssertLogsContext[None]: ... @overload def assertAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertAlmostEqual( - self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE ) -> None: ... @overload def assertAlmostEqual( self, first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], second: _T, - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertAlmostEqual( self, first: _T, second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertNotAlmostEqual(self, first: _S, second: _S, places: None, msg: Any, delta: _SupportsAbsAndDunderGE) -> None: ... @overload def assertNotAlmostEqual( - self, first: _S, second: _S, places: None = ..., msg: Any = ..., *, delta: _SupportsAbsAndDunderGE + self, first: _S, second: _S, places: None = None, msg: Any = None, *, delta: _SupportsAbsAndDunderGE ) -> None: ... @overload def assertNotAlmostEqual( self, first: SupportsSub[_T, SupportsAbs[SupportsRound[object]]], second: _T, - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... @overload def assertNotAlmostEqual( self, first: _T, second: SupportsRSub[_T, SupportsAbs[SupportsRound[object]]], - places: int | None = ..., - msg: Any = ..., - delta: None = ..., + places: int | None = None, + msg: Any = None, + delta: None = None, ) -> None: ... - def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... + def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = None) -> None: ... + def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = None) -> None: ... def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... - def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... + def assertMultiLineEqual(self, first: str, second: str, msg: Any = None) -> None: ... def assertSequenceEqual( - self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = ..., seq_type: type[Sequence[Any]] | None = ... + self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = None, seq_type: type[Sequence[Any]] | None = None ) -> None: ... - def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = ...) -> None: ... - def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = ...) -> None: ... 
- def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = ...) -> None: ... - def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = ...) -> None: ... - def fail(self, msg: Any = ...) -> NoReturn: ... + def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = None) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = None) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = None) -> None: ... + def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = None) -> None: ... + def fail(self, msg: Any = None) -> NoReturn: ... def countTestCases(self) -> int: ... def defaultTestResult(self) -> unittest.result.TestResult: ... def id(self) -> str: ... def shortDescription(self) -> str | None: ... if sys.version_info >= (3, 8): - def addCleanup(self, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def addCleanup(self, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... else: - def addCleanup(self, function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def addCleanup(self, function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... if sys.version_info >= (3, 11): def enterContext(self, cm: AbstractContextManager[_T]) -> _T: ... @@ -277,7 +262,7 @@ class TestCase: def doCleanups(self) -> None: ... if sys.version_info >= (3, 8): @classmethod - def addClassCleanup(cls, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def addClassCleanup(cls, __function: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... @classmethod def doClassCleanups(cls) -> None: ... @@ -304,22 +289,22 @@ class TestCase: assertNotRegexpMatches = assertNotRegex assertRaisesRegexp = assertRaisesRegex def assertDictContainsSubset( - self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = ... + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = None ) -> None: ... class FunctionTestCase(TestCase): def __init__( self, - testFunc: Callable[[], object], - setUp: Callable[[], object] | None = ..., - tearDown: Callable[[], object] | None = ..., - description: str | None = ..., + testFunc: Callable[[], Any], + setUp: Callable[[], Any] | None = None, + tearDown: Callable[[], Any] | None = None, + description: str | None = None, ) -> None: ... def runTest(self) -> None: ... class _AssertRaisesContext(Generic[_E]): exception: _E - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> bool: ... @@ -331,7 +316,7 @@ class _AssertWarnsContext: filename: str lineno: int warnings: list[WarningMessage] - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... 
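The `unittest/case.pyi` changes above mostly spell out the literal runtime defaults (for example `msg: Any = None` and `methodName: str = "runTest"`) and tighten a few annotations, notably `skipTest` now returning `NoReturn` and the assertion context managers using `Self` from `typing_extensions`. A minimal sketch, not part of the patch, of how those annotations read from ordinary test code (class and test names here are made up):

```python
# A minimal sketch, not part of the patch: how the updated unittest.case stubs
# read from user code. ExampleTest and its test names are illustrative only.
import unittest


class ExampleTest(unittest.TestCase):
    def test_defaults(self) -> None:
        # msg now defaults to None in the stub, matching the runtime signature,
        # so omitting it or passing msg=None type-checks the same way.
        self.assertEqual(1 + 1, 2, msg=None)

    def test_skip(self) -> None:
        # skipTest() is now annotated as NoReturn (it raises unittest.SkipTest),
        # so with --warn-unreachable mypy can flag code placed after the call.
        self.skipTest("illustration only")


if __name__ == "__main__":
    unittest.main()
```
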
diff --git a/mypy/typeshed/stdlib/unittest/loader.pyi b/mypy/typeshed/stdlib/unittest/loader.pyi index 9ba04b084c7f..f3850c939d07 100644 --- a/mypy/typeshed/stdlib/unittest/loader.pyi +++ b/mypy/typeshed/stdlib/unittest/loader.pyi @@ -18,11 +18,14 @@ class TestLoader: testNamePatterns: list[str] | None suiteClass: _SuiteClass def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... - def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = ...) -> unittest.suite.TestSuite: ... - def loadTestsFromName(self, name: str, module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... - def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... + def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = None) -> unittest.suite.TestSuite: ... + def loadTestsFromName(self, name: str, module: ModuleType | None = None) -> unittest.suite.TestSuite: ... + def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = None) -> unittest.suite.TestSuite: ... def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... - def discover(self, start_dir: str, pattern: str = ..., top_level_dir: str | None = ...) -> unittest.suite.TestSuite: ... + def discover( + self, start_dir: str, pattern: str = "test*.py", top_level_dir: str | None = None + ) -> unittest.suite.TestSuite: ... + def _match_path(self, path: str, full_path: str, pattern: str) -> bool: ... defaultTestLoader: TestLoader @@ -30,14 +33,14 @@ def getTestCaseNames( testCaseClass: type[unittest.case.TestCase], prefix: str, sortUsing: _SortComparisonMethod = ..., - testNamePatterns: list[str] | None = ..., + testNamePatterns: list[str] | None = None, ) -> Sequence[str]: ... def makeSuite( testCaseClass: type[unittest.case.TestCase], - prefix: str = ..., + prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ..., ) -> unittest.suite.TestSuite: ... def findTestCases( - module: ModuleType, prefix: str = ..., sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... + module: ModuleType, prefix: str = "test", sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ... ) -> unittest.suite.TestSuite: ... diff --git a/mypy/typeshed/stdlib/unittest/main.pyi b/mypy/typeshed/stdlib/unittest/main.pyi index 915d559cce5b..6d970c920096 100644 --- a/mypy/typeshed/stdlib/unittest/main.pyi +++ b/mypy/typeshed/stdlib/unittest/main.pyi @@ -25,23 +25,23 @@ class TestProgram: testNamePatterns: list[str] | None def __init__( self, - module: None | str | ModuleType = ..., - defaultTest: str | Iterable[str] | None = ..., - argv: list[str] | None = ..., - testRunner: type[_TestRunner] | _TestRunner | None = ..., + module: None | str | ModuleType = "__main__", + defaultTest: str | Iterable[str] | None = None, + argv: list[str] | None = None, + testRunner: type[_TestRunner] | _TestRunner | None = None, testLoader: unittest.loader.TestLoader = ..., - exit: bool = ..., - verbosity: int = ..., - failfast: bool | None = ..., - catchbreak: bool | None = ..., - buffer: bool | None = ..., - warnings: str | None = ..., + exit: bool = True, + verbosity: int = 1, + failfast: bool | None = None, + catchbreak: bool | None = None, + buffer: bool | None = None, + warnings: str | None = None, *, - tb_locals: bool = ..., + tb_locals: bool = False, ) -> None: ... - def usageExit(self, msg: Any = ...) -> None: ... 
+ def usageExit(self, msg: Any = None) -> None: ... def parseArgs(self, argv: list[str]) -> None: ... - def createTests(self, from_discovery: bool = ..., Loader: unittest.loader.TestLoader | None = ...) -> None: ... + def createTests(self, from_discovery: bool = False, Loader: unittest.loader.TestLoader | None = None) -> None: ... def runTests(self) -> None: ... # undocumented main = TestProgram diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 4732994594f8..f0345c903a3b 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -1,14 +1,15 @@ import sys -from _typeshed import Self -from collections.abc import Awaitable, Callable, Iterable, Mapping, Sequence +from collections.abc import Awaitable, Callable, Coroutine, Iterable, Mapping, Sequence from contextlib import _GeneratorContextManager from types import TracebackType from typing import Any, Generic, TypeVar, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Final, Literal, Self, TypeAlias _T = TypeVar("_T") _TT = TypeVar("_TT", bound=type[Any]) _R = TypeVar("_R") +_F = TypeVar("_F", bound=Callable[..., Any]) +_AF = TypeVar("_AF", bound=Callable[..., Coroutine[Any, Any, Any]]) if sys.version_info >= (3, 8): __all__ = ( @@ -46,7 +47,7 @@ else: "seal", ) -__version__: str +__version__: Final[str] FILTER_DIR: Any @@ -55,7 +56,6 @@ class _SentinelObject: def __init__(self, name: Any) -> None: ... class _Sentinel: - def __init__(self) -> None: ... def __getattr__(self, name: str) -> Any: ... sentinel: Any @@ -67,23 +67,23 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs class _Call(tuple[Any, ...]): def __new__( - cls: type[Self], - value: _CallValue = ..., - name: str | None = ..., - parent: Any | None = ..., - two: bool = ..., - from_kall: bool = ..., + cls, value: _CallValue = ..., name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True ) -> Self: ... name: Any parent: Any from_kall: Any def __init__( - self, value: _CallValue = ..., name: str | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... + self, + value: _CallValue = ..., + name: str | None = None, + parent: Any | None = None, + two: bool = False, + from_kall: bool = True, ) -> None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, __other: object) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... - def __getattr__(self, attr: Any) -> Any: ... + def __getattr__(self, attr: str) -> Any: ... def __getattribute__(self, attr: str) -> Any: ... if sys.version_info >= (3, 8): @property @@ -101,21 +101,23 @@ class _CallList(list[_Call]): class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... +# We subclass with "Any" because mocks are explicitly designed to stand in for other types, +# something that can't be expressed with our static type system. class NonCallableMock(Base, Any): - def __new__(__cls: type[Self], *args: Any, **kw: Any) -> Self: ... + def __new__(__cls, *args: Any, **kw: Any) -> Self: ... 
def __init__( self, - spec: list[str] | object | type[object] | None = ..., - wraps: Any | None = ..., - name: str | None = ..., - spec_set: list[str] | object | type[object] | None = ..., - parent: NonCallableMock | None = ..., - _spec_state: Any | None = ..., - _new_name: str = ..., - _new_parent: NonCallableMock | None = ..., - _spec_as_instance: bool = ..., - _eat_self: bool | None = ..., - unsafe: bool = ..., + spec: list[str] | object | type[object] | None = None, + wraps: Any | None = None, + name: str | None = None, + spec_set: list[str] | object | type[object] | None = None, + parent: NonCallableMock | None = None, + _spec_state: Any | None = None, + _new_name: str = "", + _new_parent: NonCallableMock | None = None, + _spec_as_instance: bool = False, + _eat_self: bool | None = None, + unsafe: bool = False, **kwargs: Any, ) -> None: ... def __getattr__(self, name: str) -> Any: ... @@ -123,11 +125,11 @@ class NonCallableMock(Base, Any): def __setattr__(self, name: str, value: Any) -> None: ... def __dir__(self) -> list[str]: ... if sys.version_info >= (3, 8): - def _calls_repr(self, prefix: str = ...) -> str: ... + def _calls_repr(self, prefix: str = "Calls") -> str: ... def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_not_called(self) -> None: ... def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ... - def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = ...) -> str: ... + def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = "call") -> str: ... else: def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ... def assert_not_called(_mock_self) -> None: ... @@ -140,13 +142,13 @@ class NonCallableMock(Base, Any): def assert_called(_mock_self) -> None: ... def assert_called_once(_mock_self) -> None: ... - def reset_mock(self, visited: Any = ..., *, return_value: bool = ..., side_effect: bool = ...) -> None: ... + def reset_mock(self, visited: Any = None, *, return_value: bool = False, side_effect: bool = False) -> None: ... def _extract_mock_name(self) -> str: ... def _get_call_signature_from_name(self, name: str) -> Any: ... def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = ...) -> None: ... - def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... - def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = ..., _eat_self: bool = ...) -> None: ... + def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = False) -> None: ... + def mock_add_spec(self, spec: Any, spec_set: bool = False) -> None: ... + def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = False, _eat_self: bool = False) -> None: ... def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ... def configure_mock(self, **kwargs: Any) -> None: ... 
return_value: Any @@ -164,16 +166,16 @@ class CallableMixin(Base): side_effect: Any def __init__( self, - spec: Any | None = ..., - side_effect: Any | None = ..., + spec: Any | None = None, + side_effect: Any | None = None, return_value: Any = ..., - wraps: Any | None = ..., - name: Any | None = ..., - spec_set: Any | None = ..., - parent: Any | None = ..., - _spec_state: Any | None = ..., - _new_name: Any = ..., - _new_parent: Any | None = ..., + wraps: Any | None = None, + name: Any | None = None, + spec_set: Any | None = None, + parent: Any | None = None, + _spec_state: Any | None = None, + _new_name: Any = "", + _new_parent: Any | None = None, **kwargs: Any, ) -> None: ... if sys.version_info >= (3, 8): @@ -211,7 +213,7 @@ class _patch(Generic[_T]): new_callable: Any | None, kwargs: Mapping[str, Any], *, - unsafe: bool = ..., + unsafe: bool = False, ) -> None: ... else: def __init__( @@ -257,8 +259,12 @@ class _patch_dict: in_dict: Any values: Any clear: Any - def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ... + def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... + if sys.version_info >= (3, 10): + def decorate_callable(self, f: _F) -> _F: ... + def decorate_async_callable(self, f: _AF) -> _AF: ... + def decorate_class(self, klass: Any) -> Any: ... def __enter__(self) -> Any: ... def __exit__(self, *args: object) -> Any: ... @@ -301,8 +307,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_Mock]: ... @overload + @staticmethod def object( # type: ignore[misc] - self, target: Any, attribute: str, new: _T, @@ -314,8 +320,8 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload + @staticmethod def object( - self, target: Any, attribute: str, *, @@ -326,8 +332,8 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[_Mock]: ... + @staticmethod def multiple( - self, target: Any, spec: Any | None = ..., create: bool = ..., @@ -336,18 +342,16 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[Any]: ... - def stopall(self) -> None: ... + @staticmethod + def stopall() -> None: ... patch: _patcher class MagicMixin: def __init__(self, *args: Any, **kw: Any) -> None: ... -class NonCallableMagicMock(MagicMixin, NonCallableMock): - def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... - -class MagicMock(MagicMixin, Mock): - def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... +class NonCallableMagicMock(MagicMixin, NonCallableMock): ... +class MagicMock(MagicMixin, Mock): ... if sys.version_info >= (3, 8): class AsyncMockMixin(Base): @@ -358,7 +362,7 @@ if sys.version_info >= (3, 8): def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ... def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ... - def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = ...) -> None: ... + def assert_has_awaits(self, calls: Iterable[_Call], any_order: bool = False) -> None: ... def assert_not_awaited(self) -> None: ... def reset_mock(self, *args: Any, **kwargs: Any) -> None: ... await_count: int @@ -378,7 +382,7 @@ class MagicProxy: def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def create_mock(self) -> Any: ... - def __get__(self, obj: Any, _type: Any | None = ...) -> Any: ... + def __get__(self, obj: Any, _type: Any | None = None) -> Any: ... 
class _ANY: def __eq__(self, other: object) -> Literal[True]: ... @@ -389,18 +393,23 @@ ANY: Any if sys.version_info >= (3, 10): def create_autospec( spec: Any, - spec_set: Any = ..., - instance: Any = ..., - _parent: Any | None = ..., - _name: Any | None = ..., + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, *, - unsafe: bool = ..., + unsafe: bool = False, **kwargs: Any, ) -> Any: ... else: def create_autospec( - spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Any | None = ..., _name: Any | None = ..., **kwargs: Any + spec: Any, + spec_set: Any = False, + instance: Any = False, + _parent: Any | None = None, + _name: Any | None = None, + **kwargs: Any, ) -> Any: ... class _SpecState: @@ -413,20 +422,20 @@ class _SpecState: def __init__( self, spec: Any, - spec_set: Any = ..., - parent: Any | None = ..., - name: Any | None = ..., - ids: Any | None = ..., - instance: Any = ..., + spec_set: Any = False, + parent: Any | None = None, + name: Any | None = None, + ids: Any | None = None, + instance: Any = False, ) -> None: ... -def mock_open(mock: Any | None = ..., read_data: Any = ...) -> Any: ... +def mock_open(mock: Any | None = None, read_data: Any = "") -> Any: ... class PropertyMock(Mock): if sys.version_info >= (3, 8): - def __get__(self: Self, obj: _T, obj_type: type[_T] | None = ...) -> Self: ... + def __get__(self, obj: _T, obj_type: type[_T] | None = None) -> Self: ... else: - def __get__(self: Self, obj: _T, obj_type: type[_T] | None) -> Self: ... + def __get__(self, obj: _T, obj_type: type[_T] | None) -> Self: ... def __set__(self, obj: Any, value: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/result.pyi b/mypy/typeshed/stdlib/unittest/result.pyi index 5dfec13cb52c..8d78bc0f7dcf 100644 --- a/mypy/typeshed/stdlib/unittest/result.pyi +++ b/mypy/typeshed/stdlib/unittest/result.pyi @@ -22,7 +22,7 @@ class TestResult: buffer: bool failfast: bool tb_locals: bool - def __init__(self, stream: TextIO | None = ..., descriptions: bool | None = ..., verbosity: int | None = ...) -> None: ... + def __init__(self, stream: TextIO | None = None, descriptions: bool | None = None, verbosity: int | None = None) -> None: ... def printErrors(self) -> None: ... def wasSuccessful(self) -> bool: ... def stop(self) -> None: ... diff --git a/mypy/typeshed/stdlib/unittest/runner.pyi b/mypy/typeshed/stdlib/unittest/runner.pyi index 1f1b89bc1bee..c0ddcdb49208 100644 --- a/mypy/typeshed/stdlib/unittest/runner.pyi +++ b/mypy/typeshed/stdlib/unittest/runner.pyi @@ -16,22 +16,21 @@ class TextTestResult(unittest.result.TestResult): stream: TextIO # undocumented def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... def getDescription(self, test: unittest.case.TestCase) -> str: ... - def printErrors(self) -> None: ... def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... class TextTestRunner: resultclass: _ResultClassType def __init__( self, - stream: TextIO | None = ..., - descriptions: bool = ..., - verbosity: int = ..., - failfast: bool = ..., - buffer: bool = ..., - resultclass: _ResultClassType | None = ..., - warnings: type[Warning] | None = ..., + stream: TextIO | None = None, + descriptions: bool = True, + verbosity: int = 1, + failfast: bool = False, + buffer: bool = False, + resultclass: _ResultClassType | None = None, + warnings: type[Warning] | None = None, *, - tb_locals: bool = ..., + tb_locals: bool = False, ) -> None: ... 
def _makeResult(self) -> unittest.result.TestResult: ... def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... diff --git a/mypy/typeshed/stdlib/unittest/signals.pyi b/mypy/typeshed/stdlib/unittest/signals.pyi index 89e108d926a6..a60133ada9d9 100644 --- a/mypy/typeshed/stdlib/unittest/signals.pyi +++ b/mypy/typeshed/stdlib/unittest/signals.pyi @@ -10,6 +10,6 @@ def installHandler() -> None: ... def registerResult(result: unittest.result.TestResult) -> None: ... def removeResult(result: unittest.result.TestResult) -> bool: ... @overload -def removeHandler(method: None = ...) -> None: ... +def removeHandler(method: None = None) -> None: ... @overload def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... diff --git a/mypy/typeshed/stdlib/unittest/suite.pyi b/mypy/typeshed/stdlib/unittest/suite.pyi index 26bef658f1cd..f6b8ef003518 100644 --- a/mypy/typeshed/stdlib/unittest/suite.pyi +++ b/mypy/typeshed/stdlib/unittest/suite.pyi @@ -19,4 +19,4 @@ class BaseTestSuite(Iterable[_TestType]): def __eq__(self, other: object) -> bool: ... class TestSuite(BaseTestSuite): - def run(self, result: unittest.result.TestResult, debug: bool = ...) -> unittest.result.TestResult: ... + def run(self, result: unittest.result.TestResult, debug: bool = False) -> unittest.result.TestResult: ... diff --git a/mypy/typeshed/stdlib/unittest/util.pyi b/mypy/typeshed/stdlib/unittest/util.pyi index f62c728760ff..845accfebedd 100644 --- a/mypy/typeshed/stdlib/unittest/util.pyi +++ b/mypy/typeshed/stdlib/unittest/util.pyi @@ -14,7 +14,7 @@ _MIN_DIFF_LEN: int def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... -def safe_repr(obj: object, short: bool = ...) -> str: ... +def safe_repr(obj: object, short: bool = False) -> str: ... def strclass(cls: type) -> str: ... def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... diff --git a/mypy/typeshed/stdlib/urllib/error.pyi b/mypy/typeshed/stdlib/urllib/error.pyi index 7a4de10d7cf6..89cec9bf289c 100644 --- a/mypy/typeshed/stdlib/urllib/error.pyi +++ b/mypy/typeshed/stdlib/urllib/error.pyi @@ -4,13 +4,15 @@ from urllib.response import addinfourl __all__ = ["URLError", "HTTPError", "ContentTooShortError"] -class URLError(IOError): +class URLError(OSError): reason: str | BaseException - def __init__(self, reason: str | BaseException, filename: str | None = ...) -> None: ... + def __init__(self, reason: str | BaseException, filename: str | None = None) -> None: ... class HTTPError(URLError, addinfourl): @property - def headers(self) -> Message: ... # type: ignore[override] + def headers(self) -> Message: ... + @headers.setter + def headers(self, headers: Message) -> None: ... @property def reason(self) -> str: ... 
# type: ignore[override] code: int diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 7e1ec903a15e..50c5d44cdd80 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -1,7 +1,7 @@ import sys -from collections.abc import Callable, Mapping, Sequence -from typing import Any, AnyStr, Generic, NamedTuple, overload -from typing_extensions import TypeAlias +from collections.abc import Callable, Iterable, Mapping, Sequence +from typing import Any, AnyStr, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Literal, TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -30,8 +30,6 @@ __all__ = [ "SplitResultBytes", ] -_Str: TypeAlias = bytes | str - uses_relative: list[str] uses_netloc: list[str] uses_params: list[str] @@ -46,10 +44,10 @@ class _ResultMixinBase(Generic[AnyStr]): def geturl(self) -> AnyStr: ... class _ResultMixinStr(_ResultMixinBase[str]): - def encode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinBytes: ... + def encode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinBytes: ... class _ResultMixinBytes(_ResultMixinBase[bytes]): - def decode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinStr: ... + def decode(self, encoding: str = "ascii", errors: str = "strict") -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): @property @@ -118,55 +116,100 @@ class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ... def parse_qs( qs: AnyStr | None, - keep_blank_values: bool = ..., - strict_parsing: bool = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> dict[AnyStr, list[AnyStr]]: ... def parse_qsl( qs: AnyStr | None, - keep_blank_values: bool = ..., - strict_parsing: bool = ..., - encoding: str = ..., - errors: str = ..., - max_num_fields: int | None = ..., - separator: str = ..., + keep_blank_values: bool = False, + strict_parsing: bool = False, + encoding: str = "utf-8", + errors: str = "replace", + max_num_fields: int | None = None, + separator: str = "&", ) -> list[tuple[AnyStr, AnyStr]]: ... @overload -def quote(string: str, safe: _Str = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote(string: str, safe: str | Iterable[int] = "/", encoding: str | None = None, errors: str | None = None) -> str: ... @overload -def quote(string: bytes, safe: _Str = ...) -> str: ... -def quote_from_bytes(bs: bytes, safe: _Str = ...) -> str: ... +def quote(string: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... +def quote_from_bytes(bs: bytes | bytearray, safe: str | Iterable[int] = "/") -> str: ... @overload -def quote_plus(string: str, safe: _Str = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... +def quote_plus(string: str, safe: str | Iterable[int] = "", encoding: str | None = None, errors: str | None = None) -> str: ... @overload -def quote_plus(string: bytes, safe: _Str = ...) -> str: ... -def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ... -def unquote_to_bytes(string: _Str) -> bytes: ... -def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ... +def quote_plus(string: bytes | bytearray, safe: str | Iterable[int] = "") -> str: ... 
+ +if sys.version_info >= (3, 9): + def unquote(string: str | bytes, encoding: str = "utf-8", errors: str = "replace") -> str: ... + +else: + def unquote(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... + +def unquote_to_bytes(string: str | bytes | bytearray) -> bytes: ... +def unquote_plus(string: str, encoding: str = "utf-8", errors: str = "replace") -> str: ... @overload def urldefrag(url: str) -> DefragResult: ... @overload -def urldefrag(url: bytes | None) -> DefragResultBytes: ... +def urldefrag(url: bytes | bytearray | None) -> DefragResultBytes: ... + +_Q = TypeVar("_Q", bound=str | Iterable[int]) +_QueryType: TypeAlias = ( + Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]] +) + +@overload def urlencode( - query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], - doseq: bool = ..., - safe: _Str = ..., - encoding: str = ..., - errors: str = ..., - quote_via: Callable[[AnyStr, _Str, str, str], str] = ..., + query: _QueryType, + doseq: bool = False, + safe: str = "", + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, str, str, str], str] = ..., ) -> str: ... -def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... @overload -def urlparse(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> ParseResult: ... +def urlencode( + query: _QueryType, + doseq: bool, + safe: _Q, + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., +) -> str: ... +@overload +def urlencode( + query: _QueryType, + doseq: bool = False, + *, + safe: _Q, + encoding: str | None = None, + errors: str | None = None, + quote_via: Callable[[AnyStr, _Q, str, str], str] = ..., +) -> str: ... +def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = True) -> AnyStr: ... @overload -def urlparse(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> ParseResultBytes: ... +def urlparse(url: str, scheme: str = "", allow_fragments: bool = True) -> ParseResult: ... @overload -def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> SplitResult: ... +def urlparse( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True +) -> ParseResultBytes: ... @overload -def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... +def urlsplit(url: str, scheme: str = "", allow_fragments: bool = True) -> SplitResult: ... + +if sys.version_info >= (3, 11): + @overload + def urlsplit( + url: bytes | None, scheme: bytes | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... + +else: + @overload + def urlsplit( + url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True + ) -> SplitResultBytes: ... 
+ @overload def urlunparse( components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 88f4f5250e67..09ce27961999 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -1,6 +1,6 @@ import ssl import sys -from _typeshed import StrOrBytesPath, SupportsRead +from _typeshed import ReadableBuffer, StrOrBytesPath, SupportsRead from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from email.message import Message from http.client import HTTPConnection, HTTPMessage, HTTPResponse @@ -50,17 +50,17 @@ __all__ = [ _T = TypeVar("_T") _UrlopenRet: TypeAlias = Any -_DataType: TypeAlias = bytes | SupportsRead[bytes] | Iterable[bytes] | None +_DataType: TypeAlias = ReadableBuffer | SupportsRead[bytes] | Iterable[bytes] | None def urlopen( url: str | Request, - data: _DataType | None = ..., + data: _DataType | None = None, timeout: float | None = ..., *, - cafile: str | None = ..., - capath: str | None = ..., - cadefault: bool = ..., - context: ssl.SSLContext | None = ..., + cafile: str | None = None, + capath: str | None = None, + cadefault: bool = False, + context: ssl.SSLContext | None = None, ) -> _UrlopenRet: ... def install_opener(opener: OpenerDirector) -> None: ... def build_opener(*handlers: BaseHandler | Callable[[], BaseHandler]) -> OpenerDirector: ... @@ -79,7 +79,7 @@ if sys.platform == "win32" or sys.platform == "darwin": def proxy_bypass(host: str) -> Any: ... # undocumented else: - def proxy_bypass(host: str, proxies: Mapping[str, str] | None = ...) -> Any: ... # undocumented + def proxy_bypass(host: str, proxies: Mapping[str, str] | None = None) -> Any: ... # undocumented class Request: @property @@ -101,11 +101,11 @@ class Request: def __init__( self, url: str, - data: _DataType = ..., + data: _DataType = None, headers: MutableMapping[str, str] = ..., - origin_req_host: str | None = ..., - unverifiable: bool = ..., - method: str | None = ..., + origin_req_host: str | None = None, + unverifiable: bool = False, + method: str | None = None, ) -> None: ... def get_method(self) -> str: ... def add_header(self, key: str, val: str) -> None: ... @@ -124,7 +124,7 @@ class Request: class OpenerDirector: addheaders: list[tuple[str, str]] def add_handler(self, handler: BaseHandler) -> None: ... - def open(self, fullurl: str | Request, data: _DataType = ..., timeout: float | None = ...) -> _UrlopenRet: ... + def open(self, fullurl: str | Request, data: _DataType = None, timeout: float | None = ...) -> _UrlopenRet: ... def error(self, proto: str, *args: Any) -> _UrlopenRet: ... def close(self) -> None: ... @@ -158,14 +158,14 @@ class HTTPRedirectHandler(BaseHandler): class HTTPCookieProcessor(BaseHandler): cookiejar: CookieJar - def __init__(self, cookiejar: CookieJar | None = ...) -> None: ... + def __init__(self, cookiejar: CookieJar | None = None) -> None: ... def http_request(self, request: Request) -> Request: ... # undocumented def http_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented def https_request(self, request: Request) -> Request: ... # undocumented def https_response(self, request: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented class ProxyHandler(BaseHandler): - def __init__(self, proxies: dict[str, str] | None = ...) -> None: ... + def __init__(self, proxies: dict[str, str] | None = None) -> None: ... 
def proxy_open(self, req: Request, proxy: str, type: str) -> _UrlopenRet | None: ... # undocumented # TODO add a method for every (common) proxy protocol @@ -173,7 +173,7 @@ class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... def is_suburi(self, base: str, test: str) -> bool: ... # undocumented - def reduce_uri(self, uri: str, default_port: bool = ...) -> str: ... # undocumented + def reduce_uri(self, uri: str, default_port: bool = True) -> str: ... # undocumented class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... @@ -181,16 +181,16 @@ class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): def add_password( - self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = ... + self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str, is_authenticated: bool = False ) -> None: ... - def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = ...) -> None: ... + def update_authenticated(self, uri: str | Sequence[str], is_authenticated: bool = False) -> None: ... def is_authenticated(self, authuri: str) -> bool: ... class AbstractBasicAuthHandler: rx: ClassVar[Pattern[str]] # undocumented passwd: HTTPPasswordMgr add_password: Callable[[str, str | Sequence[str], str, str], None] - def __init__(self, password_mgr: HTTPPasswordMgr | None = ...) -> None: ... + def __init__(self, password_mgr: HTTPPasswordMgr | None = None) -> None: ... def http_error_auth_reqed(self, authreq: str, host: str, req: Request, headers: HTTPMessage) -> None: ... def http_request(self, req: Request) -> Request: ... # undocumented def http_response(self, req: Request, response: HTTPResponse) -> HTTPResponse: ... # undocumented @@ -207,14 +207,14 @@ class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler): def http_error_407(self, req: Request, fp: IO[bytes], code: int, msg: str, headers: HTTPMessage) -> _UrlopenRet | None: ... class AbstractDigestAuthHandler: - def __init__(self, passwd: HTTPPasswordMgr | None = ...) -> None: ... + def __init__(self, passwd: HTTPPasswordMgr | None = None) -> None: ... def reset_retry_count(self) -> None: ... def http_error_auth_reqed(self, auth_header: str, host: str, req: Request, headers: HTTPMessage) -> None: ... def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... - def get_entity_digest(self, data: bytes | None, chal: Mapping[str, str]) -> str | None: ... + def get_entity_digest(self, data: ReadableBuffer | None, chal: Mapping[str, str]) -> str | None: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): auth_header: ClassVar[str] # undocumented @@ -235,7 +235,7 @@ class _HTTPConnectionProtocol(Protocol): ) -> HTTPConnection: ... class AbstractHTTPHandler(BaseHandler): # undocumented - def __init__(self, debuglevel: int = ...) -> None: ... + def __init__(self, debuglevel: int = 0) -> None: ... def set_http_debuglevel(self, level: int) -> None: ... 
def do_request_(self, request: Request) -> Request: ... def do_open(self, http_class: _HTTPConnectionProtocol, req: Request, **http_conn_args: Any) -> HTTPResponse: ... @@ -246,7 +246,7 @@ class HTTPHandler(AbstractHTTPHandler): class HTTPSHandler(AbstractHTTPHandler): def __init__( - self, debuglevel: int = ..., context: ssl.SSLContext | None = ..., check_hostname: bool | None = ... + self, debuglevel: int = 0, context: ssl.SSLContext | None = None, check_hostname: bool | None = None ) -> None: ... def https_open(self, req: Request) -> HTTPResponse: ... def https_request(self, request: Request) -> Request: ... # undocumented @@ -262,7 +262,7 @@ class DataHandler(BaseHandler): class ftpwrapper: # undocumented def __init__( - self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = ..., persistent: bool = ... + self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float | None = None, persistent: bool = True ) -> None: ... def close(self) -> None: ... def endtransfer(self) -> None: ... @@ -282,9 +282,6 @@ class CacheFTPHandler(FTPHandler): def setMaxConns(self, m: int) -> None: ... def check_cache(self) -> None: ... # undocumented def clear_cache(self) -> None: ... # undocumented - def connect_ftp( - self, user: str, passwd: str, host: str, port: int, dirs: str, timeout: float - ) -> ftpwrapper: ... # undocumented class UnknownHandler(BaseHandler): def unknown_open(self, req: Request) -> NoReturn: ... @@ -295,59 +292,59 @@ class HTTPErrorProcessor(BaseHandler): def urlretrieve( url: str, - filename: StrOrBytesPath | None = ..., - reporthook: Callable[[int, int, int], object] | None = ..., - data: _DataType = ..., + filename: StrOrBytesPath | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: _DataType = None, ) -> tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... class URLopener: version: ClassVar[str] - def __init__(self, proxies: dict[str, str] | None = ..., **x509: str) -> None: ... - def open(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... - def open_unknown(self, fullurl: str, data: bytes | None = ...) -> _UrlopenRet: ... + def __init__(self, proxies: dict[str, str] | None = None, **x509: str) -> None: ... + def open(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... + def open_unknown(self, fullurl: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... def retrieve( self, url: str, - filename: str | None = ..., - reporthook: Callable[[int, int, int], object] | None = ..., - data: bytes | None = ..., + filename: str | None = None, + reporthook: Callable[[int, int, int], object] | None = None, + data: ReadableBuffer | None = None, ) -> tuple[str, Message | None]: ... def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = None ) -> _UrlopenRet: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> _UrlopenRet: ... # undocumented - def open_data(self, url: str, data: bytes | None = ...) -> addinfourl: ... # undocumented + def open_data(self, url: str, data: ReadableBuffer | None = None) -> addinfourl: ... 
# undocumented def open_file(self, url: str) -> addinfourl: ... # undocumented def open_ftp(self, url: str) -> addinfourl: ... # undocumented - def open_http(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented - def open_https(self, url: str, data: bytes | None = ...) -> _UrlopenRet: ... # undocumented + def open_http(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented + def open_https(self, url: str, data: ReadableBuffer | None = None) -> _UrlopenRet: ... # undocumented def open_local_file(self, url: str) -> addinfourl: ... # undocumented - def open_unknown_proxy(self, proxy: str, fullurl: str, data: bytes | None = ...) -> None: ... # undocumented + def open_unknown_proxy(self, proxy: str, fullurl: str, data: ReadableBuffer | None = None) -> None: ... # undocumented class FancyURLopener(URLopener): def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... - def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> tuple[str, str]: ... # undocumented + def get_user_passwd(self, host: str, realm: str, clear_cache: int = 0) -> tuple[str, str]: ... # undocumented def http_error_301( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_302( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_303( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_307( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented if sys.version_info >= (3, 11): def http_error_308( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None = None ) -> _UrlopenRet | addinfourl | None: ... # undocumented def http_error_401( @@ -357,8 +354,8 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., - retry: bool = ..., + data: ReadableBuffer | None = None, + retry: bool = False, ) -> _UrlopenRet | None: ... # undocumented def http_error_407( self, @@ -367,20 +364,24 @@ class FancyURLopener(URLopener): errcode: int, errmsg: str, headers: HTTPMessage, - data: bytes | None = ..., - retry: bool = ..., + data: ReadableBuffer | None = None, + retry: bool = False, ) -> _UrlopenRet | None: ... # undocumented def http_error_default( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage ) -> addinfourl: ... 
# undocumented def redirect_internal( - self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None + self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: ReadableBuffer | None + ) -> _UrlopenRet | None: ... # undocumented + def retry_http_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None + ) -> _UrlopenRet | None: ... # undocumented + def retry_https_basic_auth( + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented - def retry_http_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented - def retry_https_basic_auth(self, url: str, realm: str, data: bytes | None = ...) -> _UrlopenRet | None: ... # undocumented def retry_proxy_http_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented def retry_proxy_https_basic_auth( - self, url: str, realm: str, data: bytes | None = ... + self, url: str, realm: str, data: ReadableBuffer | None = None ) -> _UrlopenRet | None: ... # undocumented diff --git a/mypy/typeshed/stdlib/urllib/response.pyi b/mypy/typeshed/stdlib/urllib/response.pyi index 8c9a600f3c48..61ba687076b2 100644 --- a/mypy/typeshed/stdlib/urllib/response.pyi +++ b/mypy/typeshed/stdlib/urllib/response.pyi @@ -1,20 +1,21 @@ import sys -from _typeshed import Self +from _typeshed import ReadableBuffer from collections.abc import Callable, Iterable from email.message import Message from types import TracebackType from typing import IO, Any, BinaryIO +from typing_extensions import Self __all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(BinaryIO): fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self: Self) -> Self: ... + def __iter__(self) -> Self: ... def __next__(self) -> bytes: ... def close(self) -> None: ... # These methods don't actually exist, but the class inherits at runtime from @@ -33,8 +34,8 @@ class addbase(BinaryIO): def tell(self) -> int: ... def truncate(self, size: int | None = ...) -> int: ... def writable(self) -> bool: ... - def write(self, s: bytes) -> int: ... - def writelines(self, lines: Iterable[bytes]) -> None: ... + def write(self, s: ReadableBuffer) -> int: ... + def writelines(self, lines: Iterable[ReadableBuffer]) -> None: ... class addclosehook(addbase): closehook: Callable[..., object] @@ -53,6 +54,6 @@ class addinfourl(addinfo): @property def status(self) -> int | None: ... - def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = ...) -> None: ... + def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = None) -> None: ... def geturl(self) -> str: ... def getcode(self) -> int | None: ... diff --git a/mypy/typeshed/stdlib/urllib/robotparser.pyi b/mypy/typeshed/stdlib/urllib/robotparser.pyi index 795cf83fcecd..d218c3dc6c0f 100644 --- a/mypy/typeshed/stdlib/urllib/robotparser.pyi +++ b/mypy/typeshed/stdlib/urllib/robotparser.pyi @@ -9,7 +9,7 @@ class RequestRate(NamedTuple): seconds: int class RobotFileParser: - def __init__(self, url: str = ...) -> None: ... + def __init__(self, url: str = "") -> None: ... 
def set_url(self, url: str) -> None: ... def read(self) -> None: ... def parse(self, lines: Iterable[str]) -> None: ... diff --git a/mypy/typeshed/stdlib/uu.pyi b/mypy/typeshed/stdlib/uu.pyi index 95a7f3dfa9e2..324053e04337 100644 --- a/mypy/typeshed/stdlib/uu.pyi +++ b/mypy/typeshed/stdlib/uu.pyi @@ -7,5 +7,7 @@ _File: TypeAlias = str | BinaryIO class Error(Exception): ... -def encode(in_file: _File, out_file: _File, name: str | None = ..., mode: int | None = ..., *, backtick: bool = ...) -> None: ... -def decode(in_file: _File, out_file: _File | None = ..., mode: int | None = ..., quiet: int = ...) -> None: ... +def encode( + in_file: _File, out_file: _File, name: str | None = None, mode: int | None = None, *, backtick: bool = False +) -> None: ... +def decode(in_file: _File, out_file: _File | None = None, mode: int | None = None, quiet: bool = False) -> None: ... diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index 3d9b89a0b9f7..249257783626 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -1,3 +1,5 @@ +import sys +from _typeshed import Unused from enum import Enum from typing_extensions import TypeAlias @@ -14,12 +16,12 @@ class SafeUUID(Enum): class UUID: def __init__( self, - hex: str | None = ..., - bytes: _Bytes | None = ..., - bytes_le: _Bytes | None = ..., - fields: _FieldsType | None = ..., - int: _Int | None = ..., - version: _Int | None = ..., + hex: str | None = None, + bytes: _Bytes | None = None, + bytes_le: _Bytes | None = None, + fields: _FieldsType | None = None, + int: _Int | None = None, + version: _Int | None = None, *, is_safe: SafeUUID = ..., ) -> None: ... @@ -64,8 +66,13 @@ class UUID: def __gt__(self, other: UUID) -> bool: ... def __ge__(self, other: UUID) -> bool: ... -def getnode() -> int: ... -def uuid1(node: _Int | None = ..., clock_seq: _Int | None = ...) -> UUID: ... +if sys.version_info >= (3, 9): + def getnode() -> int: ... + +else: + def getnode(*, getters: Unused = None) -> int: ... # undocumented + +def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ... def uuid3(namespace: UUID, name: str) -> UUID: ... def uuid4() -> UUID: ... def uuid5(namespace: UUID, name: str) -> UUID: ... diff --git a/mypy/typeshed/stdlib/venv/__init__.pyi b/mypy/typeshed/stdlib/venv/__init__.pyi index 6afe328ac90d..f184649f10f0 100644 --- a/mypy/typeshed/stdlib/venv/__init__.pyi +++ b/mypy/typeshed/stdlib/venv/__init__.pyi @@ -1,8 +1,11 @@ -from collections.abc import Sequence +import logging import sys from _typeshed import StrOrBytesPath +from collections.abc import Sequence from types import SimpleNamespace +logger: logging.Logger + if sys.version_info >= (3, 9): CORE_VENV_DEPS: tuple[str, ...] @@ -17,23 +20,23 @@ class EnvBuilder: if sys.version_info >= (3, 9): def __init__( self, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - upgrade: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., - upgrade_deps: bool = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... 
else: def __init__( self, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - upgrade: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + upgrade: bool = False, + with_pip: bool = False, + prompt: str | None = None, ) -> None: ... def create(self, env_dir: StrOrBytesPath) -> None: ... @@ -41,7 +44,7 @@ class EnvBuilder: def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... def create_configuration(self, context: SimpleNamespace) -> None: ... def symlink_or_copy( - self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = ... + self, src: StrOrBytesPath, dst: StrOrBytesPath, relative_symlinks_ok: bool = False ) -> None: ... # undocumented def setup_python(self, context: SimpleNamespace) -> None: ... def _setup_pip(self, context: SimpleNamespace) -> None: ... # undocumented @@ -55,22 +58,22 @@ class EnvBuilder: if sys.version_info >= (3, 9): def create( env_dir: StrOrBytesPath, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., - upgrade_deps: bool = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, + upgrade_deps: bool = False, ) -> None: ... else: def create( env_dir: StrOrBytesPath, - system_site_packages: bool = ..., - clear: bool = ..., - symlinks: bool = ..., - with_pip: bool = ..., - prompt: str | None = ..., + system_site_packages: bool = False, + clear: bool = False, + symlinks: bool = False, + with_pip: bool = False, + prompt: str | None = None, ) -> None: ... -def main(args: Sequence[str] | None = ...) -> None: ... +def main(args: Sequence[str] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/warnings.pyi b/mypy/typeshed/stdlib/warnings.pyi index 5cc6b946409b..6222eb65918a 100644 --- a/mypy/typeshed/stdlib/warnings.pyi +++ b/mypy/typeshed/stdlib/warnings.pyi @@ -22,18 +22,20 @@ _ActionKind: TypeAlias = Literal["default", "error", "ignore", "always", "module filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]] # undocumented, do not mutate def showwarning( - message: Warning | str, category: type[Warning], filename: str, lineno: int, file: TextIO | None = ..., line: str | None = ... + message: Warning | str, + category: type[Warning], + filename: str, + lineno: int, + file: TextIO | None = None, + line: str | None = None, ) -> None: ... -def formatwarning(message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = ...) -> str: ... +def formatwarning( + message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = None +) -> str: ... def filterwarnings( - action: _ActionKind, - message: str = ..., - category: type[Warning] = ..., - module: str = ..., - lineno: int = ..., - append: bool = ..., + action: _ActionKind, message: str = "", category: type[Warning] = ..., module: str = "", lineno: int = 0, append: bool = False ) -> None: ... -def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = 0, append: bool = False) -> None: ... def resetwarnings() -> None: ... class _OptionError(Exception): ... 
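(Illustrative aside, not part of the patch.) The warnings.pyi hunk above writes out the literal runtime defaults (`message=""`, `lineno=0`, `append=False`) instead of `...`. A minimal sketch of what those defaults mean at a call site, using only the public `warnings` API; `noisy` is an invented helper name:

```python
import warnings

def noisy() -> None:
    warnings.warn("legacy call", DeprecationWarning)

# simplefilter(action, category=Warning, lineno=0, append=False)
warnings.simplefilter("ignore", DeprecationWarning)
noisy()  # suppressed by the "ignore" filter

# filterwarnings(action, message="", category=Warning, module="", lineno=0, append=False)
# append=False inserts this filter at the front of the filter list.
warnings.filterwarnings("error", category=DeprecationWarning)
try:
    noisy()
except DeprecationWarning as exc:
    print(f"now raised as an error: {exc}")
```

Because `append` defaults to `False`, the later filter is consulted first, which is why the same call is silenced once and then raised as an error.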
@@ -52,9 +54,9 @@ class WarningMessage: category: type[Warning], filename: str, lineno: int, - file: TextIO | None = ..., - line: str | None = ..., - source: Any | None = ..., + file: TextIO | None = None, + line: str | None = None, + source: Any | None = None, ) -> None: ... class catch_warnings(Generic[_W]): @@ -63,45 +65,45 @@ class catch_warnings(Generic[_W]): def __init__( self: catch_warnings[None], *, - record: Literal[False] = ..., - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + record: Literal[False] = False, + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage]], *, record: Literal[True], - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... @overload def __init__( self: catch_warnings[list[WarningMessage] | None], *, record: bool, - module: ModuleType | None = ..., - action: _ActionKind | None = ..., + module: ModuleType | None = None, + action: _ActionKind | None = None, category: type[Warning] = ..., - lineno: int = ..., - append: bool = ..., + lineno: int = 0, + append: bool = False, ) -> None: ... else: @overload - def __init__(self: catch_warnings[None], *, record: Literal[False] = ..., module: ModuleType | None = ...) -> None: ... + def __init__(self: catch_warnings[None], *, record: Literal[False] = False, module: ModuleType | None = None) -> None: ... @overload def __init__( - self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = ... + self: catch_warnings[list[WarningMessage]], *, record: Literal[True], module: ModuleType | None = None ) -> None: ... @overload def __init__( - self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = ... + self: catch_warnings[list[WarningMessage] | None], *, record: bool, module: ModuleType | None = None ) -> None: ... def __enter__(self) -> _W: ... diff --git a/mypy/typeshed/stdlib/wave.pyi b/mypy/typeshed/stdlib/wave.pyi index 853a26a9469e..0d004d6b2d8a 100644 --- a/mypy/typeshed/stdlib/wave.pyi +++ b/mypy/typeshed/stdlib/wave.pyi @@ -1,7 +1,7 @@ import sys -from _typeshed import ReadableBuffer, Self +from _typeshed import ReadableBuffer, Unused from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias if sys.version_info >= (3, 9): __all__ = ["open", "Error", "Wave_read", "Wave_write"] @@ -24,8 +24,8 @@ class _wave_params(NamedTuple): class Wave_read: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def getfp(self) -> BinaryIO | None: ... def rewind(self) -> None: ... def close(self) -> None: ... @@ -44,8 +44,8 @@ class Wave_read: class Wave_write: def __init__(self, f: _File) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, *args: object) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... def setnchannels(self, nchannels: int) -> None: ... 
def getnchannels(self) -> int: ... def setsampwidth(self, sampwidth: int) -> None: ... @@ -72,7 +72,7 @@ def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... @overload def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... @overload -def open(f: _File, mode: str | None = ...) -> Any: ... +def open(f: _File, mode: str | None = None) -> Any: ... if sys.version_info < (3, 9): openfp = open diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index af960391e85d..1e0aac814dfb 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -1,10 +1,5 @@ import sys -from _typeshed import Self, SupportsKeysAndGetItem -from _weakrefset import WeakSet as WeakSet -from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping -from typing import Any, Generic, TypeVar, overload -from typing_extensions import ParamSpec - +from _typeshed import SupportsKeysAndGetItem from _weakref import ( CallableProxyType as CallableProxyType, ProxyType as ProxyType, @@ -14,6 +9,10 @@ from _weakref import ( proxy as proxy, ref as ref, ) +from _weakrefset import WeakSet as WeakSet +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import ParamSpec, Self __all__ = [ "ref", @@ -42,7 +41,7 @@ _P = ParamSpec("_P") ProxyTypes: tuple[type[Any], ...] class WeakMethod(ref[_CallableT], Generic[_CallableT]): - def __new__(cls: type[Self], meth: _CallableT, callback: Callable[[_CallableT], object] | None = ...) -> Self: ... + def __new__(cls, meth: _CallableT, callback: Callable[[_CallableT], object] | None = None) -> Self: ... def __call__(self) -> _CallableT | None: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... @@ -64,14 +63,14 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> WeakValueDictionary[_KT, _VT]: ... __copy__ = copy - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + def setdefault(self, key: _KT, default: _VT) -> _VT: ... # type: ignore[override] @overload def pop(self, key: _KT) -> _VT: ... @overload @@ -81,19 +80,19 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT # This __new__ method uses a non-standard name for the "cls" parameter - def __new__(type: type[Self], ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... 
+ def __new__(type, ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload - def __init__(self, dict: None = ...) -> None: ... + def __init__(self, dict: None = None) -> None: ... @overload def __init__(self, dict: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... def __len__(self) -> int: ... @@ -104,13 +103,17 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __iter__(self) -> Iterator[_KT]: ... def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... __copy__ = copy - def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... # These are incompatible with Mapping def keys(self) -> Iterator[_KT]: ... # type: ignore[override] def values(self) -> Iterator[_VT]: ... # type: ignore[override] def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def keyrefs(self) -> list[ref[_KT]]: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + # Keep WeakKeyDictionary.setdefault in line with MutableMapping.setdefault, modulo positional-only differences + @overload + def setdefault(self: WeakKeyDictionary[_KT, _VT | None], key: _KT, default: None = None) -> _VT: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... @overload def pop(self, key: _KT) -> _VT: ... @overload @@ -120,13 +123,13 @@ class WeakKeyDictionary(MutableMapping[_KT, _VT]): def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() @overload # type: ignore[misc] - def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... @overload - def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class finalize: # TODO: This is a good candidate for to be a `Generic[_P, _T]` class def __init__(self, __obj: object, __func: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... - def __call__(self, _: Any = ...) -> Any | None: ... + def __call__(self, _: Any = None) -> Any | None: ... def detach(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... def peek(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... @property diff --git a/mypy/typeshed/stdlib/webbrowser.pyi b/mypy/typeshed/stdlib/webbrowser.pyi index 8cf8935ffaad..02edd42e7d59 100644 --- a/mypy/typeshed/stdlib/webbrowser.pyi +++ b/mypy/typeshed/stdlib/webbrowser.pyi @@ -8,10 +8,10 @@ __all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] class Error(Exception): ... def register( - name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = ..., *, preferred: bool = ... + name: str, klass: Callable[[], BaseBrowser] | None, instance: BaseBrowser | None = None, *, preferred: bool = False ) -> None: ... -def get(using: str | None = ...) -> BaseBrowser: ... -def open(url: str, new: int = ..., autoraise: bool = ...) -> bool: ... +def get(using: str | None = None) -> BaseBrowser: ... +def open(url: str, new: int = 0, autoraise: bool = True) -> bool: ... def open_new(url: str) -> bool: ... def open_new_tab(url: str) -> bool: ... @@ -19,20 +19,20 @@ class BaseBrowser: args: list[str] name: str basename: str - def __init__(self, name: str = ...) -> None: ... 
+ def __init__(self, name: str = "") -> None: ... @abstractmethod - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... def open_new(self, url: str) -> bool: ... def open_new_tab(self, url: str) -> bool: ... class GenericBrowser(BaseBrowser): def __init__(self, name: str | Sequence[str]) -> None: ... - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class BackgroundBrowser(GenericBrowser): ... class UnixBrowser(BaseBrowser): - def open(self, url: str, new: Literal[0, 1, 2] = ..., autoraise: bool = ...) -> bool: ... # type: ignore[override] + def open(self, url: str, new: Literal[0, 1, 2] = 0, autoraise: bool = True) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool redirect_stdout: bool @@ -51,18 +51,23 @@ class Opera(UnixBrowser): ... class Elinks(UnixBrowser): ... class Konqueror(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class Grail(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "win32": class WindowsDefault(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... if sys.platform == "darwin": class MacOSX(BaseBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... + if sys.version_info >= (3, 11): + def __init__(self, name: str = "default") -> None: ... + else: + def __init__(self, name: str) -> None: ... + + def open(self, url: str, new: int = 0, autoraise: bool = True) -> bool: ... diff --git a/mypy/typeshed/stdlib/winreg.pyi b/mypy/typeshed/stdlib/winreg.pyi index 2cc42318f1a4..5b2d09a3bebc 100644 --- a/mypy/typeshed/stdlib/winreg.pyi +++ b/mypy/typeshed/stdlib/winreg.pyi @@ -1,25 +1,24 @@ import sys -from _typeshed import Self from types import TracebackType from typing import Any -from typing_extensions import Literal, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, final if sys.platform == "win32": _KeyType: TypeAlias = HKEYType | int def CloseKey(__hkey: _KeyType) -> None: ... def ConnectRegistry(__computer_name: str | None, __key: _KeyType) -> HKEYType: ... def CreateKey(__key: _KeyType, __sub_key: str | None) -> HKEYType: ... - def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = ..., access: int = ...) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = 0, access: int = 131078) -> HKEYType: ... def DeleteKey(__key: _KeyType, __sub_key: str) -> None: ... - def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = ..., reserved: int = ...) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = 256, reserved: int = 0) -> None: ... def DeleteValue(__key: _KeyType, __value: str) -> None: ... def EnumKey(__key: _KeyType, __index: int) -> str: ... 
def EnumValue(__key: _KeyType, __index: int) -> tuple[str, Any, int]: ... def ExpandEnvironmentStrings(__str: str) -> str: ... def FlushKey(__key: _KeyType) -> None: ... def LoadKey(__key: _KeyType, __sub_key: str, __file_name: str) -> None: ... - def OpenKey(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... - def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... + def OpenKey(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = 0, access: int = 131097) -> HKEYType: ... def QueryInfoKey(__key: _KeyType) -> tuple[int, int, int]: ... def QueryValue(__key: _KeyType, __sub_key: str | None) -> str: ... def QueryValueEx(__key: _KeyType, __name: str) -> tuple[Any, int]: ... @@ -93,7 +92,7 @@ if sys.platform == "win32": class HKEYType: def __bool__(self) -> bool: ... def __int__(self) -> int: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/winsound.pyi b/mypy/typeshed/stdlib/winsound.pyi index 588bd5969e98..9b2b57a38986 100644 --- a/mypy/typeshed/stdlib/winsound.pyi +++ b/mypy/typeshed/stdlib/winsound.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import ReadableBuffer from typing import overload from typing_extensions import Literal @@ -21,7 +22,7 @@ if sys.platform == "win32": def Beep(frequency: int, duration: int) -> None: ... # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload - def PlaySound(sound: bytes | None, flags: Literal[4]) -> None: ... + def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ... @overload - def PlaySound(sound: str | bytes | None, flags: int) -> None: ... - def MessageBeep(type: int = ...) -> None: ... + def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ... + def MessageBeep(type: int = 0) -> None: ... diff --git a/mypy/typeshed/stdlib/wsgiref/handlers.pyi b/mypy/typeshed/stdlib/wsgiref/handlers.pyi index 655fba668598..ebead540018e 100644 --- a/mypy/typeshed/stdlib/wsgiref/handlers.pyi +++ b/mypy/typeshed/stdlib/wsgiref/handlers.pyi @@ -38,7 +38,7 @@ class BaseHandler: def set_content_length(self) -> None: ... def cleanup_headers(self) -> None: ... def start_response( - self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = ... + self, status: str, headers: list[tuple[str, str]], exc_info: OptExcInfo | None = None ) -> Callable[[bytes], None]: ... def send_preamble(self) -> None: ... def write(self, data: bytes) -> None: ... @@ -73,8 +73,8 @@ class SimpleHandler(BaseHandler): stdout: IO[bytes], stderr: ErrorStream, environ: MutableMapping[str, str], - multithread: bool = ..., - multiprocess: bool = ..., + multithread: bool = True, + multiprocess: bool = False, ) -> None: ... def get_stdin(self) -> InputStream: ... def get_stderr(self) -> ErrorStream: ... diff --git a/mypy/typeshed/stdlib/wsgiref/headers.pyi b/mypy/typeshed/stdlib/wsgiref/headers.pyi index dd963d9b4727..2654d79bf4e5 100644 --- a/mypy/typeshed/stdlib/wsgiref/headers.pyi +++ b/mypy/typeshed/stdlib/wsgiref/headers.pyi @@ -7,7 +7,7 @@ _HeaderList: TypeAlias = list[tuple[str, str]] tspecials: Pattern[str] # undocumented class Headers: - def __init__(self, headers: _HeaderList | None = ...) -> None: ... 
+ def __init__(self, headers: _HeaderList | None = None) -> None: ... def __len__(self) -> int: ... def __setitem__(self, name: str, val: str) -> None: ... def __delitem__(self, name: str) -> None: ... @@ -17,7 +17,7 @@ class Headers: @overload def get(self, name: str, default: str) -> str: ... @overload - def get(self, name: str, default: str | None = ...) -> str | None: ... + def get(self, name: str, default: str | None = None) -> str | None: ... def keys(self) -> list[str]: ... def values(self) -> list[str]: ... def items(self) -> _HeaderList: ... diff --git a/mypy/typeshed/stdlib/wsgiref/simple_server.pyi b/mypy/typeshed/stdlib/wsgiref/simple_server.pyi index 1dc84e9fbebe..547f562cc1d4 100644 --- a/mypy/typeshed/stdlib/wsgiref/simple_server.pyi +++ b/mypy/typeshed/stdlib/wsgiref/simple_server.pyi @@ -12,7 +12,6 @@ software_version: str # undocumented class ServerHandler(SimpleHandler): # undocumented server_software: str - def close(self) -> None: ... class WSGIServer(HTTPServer): application: WSGIApplication | None @@ -25,7 +24,6 @@ class WSGIRequestHandler(BaseHTTPRequestHandler): server_version: str def get_environ(self) -> WSGIEnvironment: ... def get_stderr(self) -> ErrorStream: ... - def handle(self) -> None: ... def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... diff --git a/mypy/typeshed/stdlib/wsgiref/util.pyi b/mypy/typeshed/stdlib/wsgiref/util.pyi index 36e5c1e69676..962fac2c5a22 100644 --- a/mypy/typeshed/stdlib/wsgiref/util.pyi +++ b/mypy/typeshed/stdlib/wsgiref/util.pyi @@ -9,7 +9,7 @@ class FileWrapper: filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists - def __init__(self, filelike: IO[bytes], blksize: int = ...) -> None: ... + def __init__(self, filelike: IO[bytes], blksize: int = 8192) -> None: ... if sys.version_info < (3, 11): def __getitem__(self, key: Any) -> bytes: ... @@ -18,7 +18,7 @@ class FileWrapper: def guess_scheme(environ: WSGIEnvironment) -> str: ... def application_uri(environ: WSGIEnvironment) -> str: ... -def request_uri(environ: WSGIEnvironment, include_query: bool = ...) -> str: ... +def request_uri(environ: WSGIEnvironment, include_query: bool = True) -> str: ... def shift_path_info(environ: WSGIEnvironment) -> str | None: ... def setup_testing_defaults(environ: WSGIEnvironment) -> None: ... def is_hop_by_hop(header_name: str) -> bool: ... diff --git a/mypy/typeshed/stdlib/xdrlib.pyi b/mypy/typeshed/stdlib/xdrlib.pyi index e0b8c6a54b00..78f3ecec8d78 100644 --- a/mypy/typeshed/stdlib/xdrlib.pyi +++ b/mypy/typeshed/stdlib/xdrlib.pyi @@ -12,7 +12,6 @@ class Error(Exception): class ConversionError(Error): ... class Packer: - def __init__(self) -> None: ... def reset(self) -> None: ... def get_buffer(self) -> bytes: ... def get_buf(self) -> bytes: ... 
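(Illustrative aside, not part of the patch.) The wsgiref.headers hunk above keeps `Headers.get` as two overloads and makes the optional default explicit (`default: str | None = None`). A small sketch of how a type checker reads those overloads; the header names and variable names are invented:

```python
from wsgiref.headers import Headers

h = Headers([("Content-Type", "text/plain")])

always_str = h.get("Content-Type", "application/octet-stream")  # checker infers: str
maybe_none = h.get("X-Missing")                                 # checker infers: str | None

print(always_str.upper())     # fine, cannot be None
if maybe_none is not None:    # narrowing required before use
    print(maybe_none.upper())
```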
diff --git a/mypy/typeshed/stdlib/xml/__init__.pyi b/mypy/typeshed/stdlib/xml/__init__.pyi index c524ac2b1cfc..a487d2467f41 100644 --- a/mypy/typeshed/stdlib/xml/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/__init__.pyi @@ -1 +1 @@ -import xml.parsers as parsers +from xml import parsers as parsers diff --git a/mypy/typeshed/stdlib/xml/dom/domreg.pyi b/mypy/typeshed/stdlib/xml/dom/domreg.pyi index 5a276ae5f561..a46d3ff090e6 100644 --- a/mypy/typeshed/stdlib/xml/dom/domreg.pyi +++ b/mypy/typeshed/stdlib/xml/dom/domreg.pyi @@ -5,4 +5,6 @@ well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... -def getDOMImplementation(name: str | None = ..., features: str | Iterable[tuple[str, str | None]] = ...) -> DOMImplementation: ... +def getDOMImplementation( + name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ... +) -> DOMImplementation: ... diff --git a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi index 58914e8fabf1..45f0af7aa979 100644 --- a/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/expatbuilder.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete, ReadableBuffer, SupportsRead from typing import Any, NoReturn from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo from xml.dom.xmlbuilder import DOMBuilderFilter, Options @@ -12,8 +13,8 @@ FILTER_INTERRUPT = DOMBuilderFilter.FILTER_INTERRUPT theDOMImplementation: DOMImplementation | None class ElementInfo: - tagName: Any - def __init__(self, tagName, model: Any | None = ...) -> None: ... + tagName: Incomplete + def __init__(self, tagName, model: Incomplete | None = None) -> None: ... def getAttributeType(self, aname) -> TypeInfo: ... def getAttributeTypeNS(self, namespaceURI, localName) -> TypeInfo: ... def isElementContent(self) -> bool: ... @@ -23,14 +24,14 @@ class ElementInfo: class ExpatBuilder: document: Document # Created in self.reset() - curNode: Any # Created in self.reset() - def __init__(self, options: Options | None = ...) -> None: ... + curNode: Incomplete # Created in self.reset() + def __init__(self, options: Options | None = None) -> None: ... def createParser(self): ... def getParser(self): ... def reset(self) -> None: ... def install(self, parser) -> None: ... - def parseFile(self, file) -> Document: ... - def parseString(self, string: str) -> Document: ... + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> Document: ... + def parseString(self, string: str | ReadableBuffer) -> Document: ... def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... def end_doctype_decl_handler(self) -> None: ... def pi_handler(self, target, data) -> None: ... @@ -67,10 +68,10 @@ class Skipper(FilterCrutch): def end_element_handler(self, *args: Any) -> None: ... class FragmentBuilder(ExpatBuilder): - fragment: Any | None - originalDocument: Any - context: Any - def __init__(self, context, options: Options | None = ...) -> None: ... + fragment: Incomplete | None + originalDocument: Incomplete + context: Incomplete + def __init__(self, context, options: Options | None = None) -> None: ... class Namespaces: def createParser(self): ... @@ -86,14 +87,14 @@ class ParseEscape(Exception): ... class InternalSubsetExtractor(ExpatBuilder): subset: Any | None def getSubset(self) -> Any | None: ... - def parseFile(self, file) -> None: ... 
# type: ignore[override] - def parseString(self, string: str) -> None: ... # type: ignore[override] + def parseFile(self, file: SupportsRead[ReadableBuffer | str]) -> None: ... # type: ignore[override] + def parseString(self, string: str | ReadableBuffer) -> None: ... # type: ignore[override] def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... # type: ignore[override] def end_doctype_decl_handler(self) -> NoReturn: ... def start_element_handler(self, name, attrs) -> NoReturn: ... -def parse(file, namespaces: bool = ...): ... -def parseString(string: str, namespaces: bool = ...): ... -def parseFragment(file, context, namespaces: bool = ...): ... -def parseFragmentString(string: str, context, namespaces: bool = ...): ... +def parse(file: str | SupportsRead[ReadableBuffer | str], namespaces: bool = True): ... +def parseString(string: str | ReadableBuffer, namespaces: bool = True): ... +def parseFragment(file, context, namespaces: bool = True): ... +def parseFragmentString(string: str, context, namespaces: bool = True): ... def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index 7645bd79e9c1..7bbffb88c8f7 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,22 +1,21 @@ import sys import xml.dom -from _typeshed import Self, SupportsRead -from typing import Any -from typing_extensions import Literal +from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite +from typing_extensions import Literal, Self from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader -def parse(file: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... -def parseString(string: str | bytes, parser: XMLReader | None = ...): ... -def getDOMImplementation(features=...) -> DOMImplementation | None: ... +def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None): ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None): ... +def getDOMImplementation(features=None) -> DOMImplementation | None: ... class Node(xml.dom.Node): namespaceURI: str | None - parentNode: Any - ownerDocument: Any - nextSibling: Any - previousSibling: Any - prefix: Any + parentNode: Incomplete + ownerDocument: Incomplete + nextSibling: Incomplete + previousSibling: Incomplete + prefix: Incomplete @property def firstChild(self) -> Node | None: ... @property @@ -25,11 +24,13 @@ class Node(xml.dom.Node): def localName(self) -> str | None: ... def __bool__(self) -> Literal[True]: ... if sys.version_info >= (3, 9): - def toxml(self, encoding: Any | None = ..., standalone: Any | None = ...): ... - def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: Any | None = ..., standalone: Any | None = ...): ... + def toxml(self, encoding: str | None = None, standalone: bool | None = None): ... + def toprettyxml( + self, indent: str = "\t", newl: str = "\n", encoding: str | None = None, standalone: bool | None = None + ): ... else: - def toxml(self, encoding: Any | None = ...): ... - def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: Any | None = ...): ... + def toxml(self, encoding: str | None = None): ... + def toprettyxml(self, indent: str = "\t", newl: str = "\n", encoding: str | None = None): ... 
def hasChildNodes(self) -> bool: ... def insertBefore(self, newChild, refChild): ... @@ -43,40 +44,40 @@ class Node(xml.dom.Node): def getInterface(self, feature): ... def getUserData(self, key): ... def setUserData(self, key, data, handler): ... - childNodes: Any + childNodes: Incomplete def unlink(self) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__(self, et, ev, tb) -> None: ... class DocumentFragment(Node): - nodeType: Any + nodeType: int nodeName: str - nodeValue: Any - attributes: Any - parentNode: Any - childNodes: Any + nodeValue: Incomplete + attributes: Incomplete + parentNode: Incomplete + childNodes: Incomplete def __init__(self) -> None: ... class Attr(Node): name: str - nodeType: Any - attributes: Any + nodeType: int + attributes: Incomplete specified: bool - ownerElement: Any + ownerElement: Incomplete namespaceURI: str | None - childNodes: Any - nodeName: Any + childNodes: Incomplete + nodeName: Incomplete nodeValue: str value: str - prefix: Any + prefix: Incomplete def __init__( - self, qName: str, namespaceURI: str | None = ..., localName: Any | None = ..., prefix: Any | None = ... + self, qName: str, namespaceURI: str | None = None, localName: str | None = None, prefix: Incomplete | None = None ) -> None: ... def unlink(self) -> None: ... @property def isId(self) -> bool: ... @property - def schemaType(self) -> Any: ... + def schemaType(self): ... class NamedNodeMap: def __init__(self, attrs, attrsNS, ownerElement) -> None: ... @@ -87,45 +88,45 @@ class NamedNodeMap: def keys(self): ... def keysNS(self): ... def values(self): ... - def get(self, name, value: Any | None = ...): ... + def get(self, name: str, value: Incomplete | None = None): ... def __len__(self) -> int: ... def __eq__(self, other: object) -> bool: ... - def __ge__(self, other: Any) -> bool: ... - def __gt__(self, other: Any) -> bool: ... - def __le__(self, other: Any) -> bool: ... - def __lt__(self, other: Any) -> bool: ... - def __getitem__(self, attname_or_tuple): ... - def __setitem__(self, attname, value) -> None: ... - def getNamedItem(self, name): ... - def getNamedItemNS(self, namespaceURI: str, localName): ... - def removeNamedItem(self, name): ... - def removeNamedItemNS(self, namespaceURI: str, localName): ... - def setNamedItem(self, node): ... - def setNamedItemNS(self, node): ... - def __delitem__(self, attname_or_tuple) -> None: ... + def __ge__(self, other: NamedNodeMap) -> bool: ... + def __gt__(self, other: NamedNodeMap) -> bool: ... + def __le__(self, other: NamedNodeMap) -> bool: ... + def __lt__(self, other: NamedNodeMap) -> bool: ... + def __getitem__(self, attname_or_tuple: tuple[str, str | None] | str): ... + def __setitem__(self, attname: str, value: Attr | str) -> None: ... + def getNamedItem(self, name: str) -> Attr | None: ... + def getNamedItemNS(self, namespaceURI: str, localName: str | None) -> Attr | None: ... + def removeNamedItem(self, name: str) -> Attr: ... + def removeNamedItemNS(self, namespaceURI: str, localName: str | None): ... + def setNamedItem(self, node: Attr) -> Attr: ... + def setNamedItemNS(self, node: Attr) -> Attr: ... + def __delitem__(self, attname_or_tuple: tuple[str, str | None] | str) -> None: ... @property def length(self) -> int: ... AttributeList = NamedNodeMap class TypeInfo: - namespace: Any - name: Any - def __init__(self, namespace, name) -> None: ... + namespace: Incomplete | None + name: str + def __init__(self, namespace: Incomplete | None, name: str) -> None: ... 
class Element(Node): - nodeType: Any - nodeValue: Any - schemaType: Any - parentNode: Any + nodeType: int + nodeValue: Incomplete + schemaType: Incomplete + parentNode: Incomplete tagName: str nodeName: str - prefix: Any + prefix: Incomplete namespaceURI: str | None - childNodes: Any - nextSibling: Any + childNodes: Incomplete + nextSibling: Incomplete def __init__( - self, tagName, namespaceURI: str | None = ..., prefix: Any | None = ..., localName: Any | None = ... + self, tagName, namespaceURI: str | None = None, prefix: Incomplete | None = None, localName: Incomplete | None = None ) -> None: ... def unlink(self) -> None: ... def getAttribute(self, attname: str) -> str: ... @@ -135,16 +136,16 @@ class Element(Node): def getAttributeNode(self, attrname: str): ... def getAttributeNodeNS(self, namespaceURI: str, localName): ... def setAttributeNode(self, attr): ... - setAttributeNodeNS: Any + setAttributeNodeNS: Incomplete def removeAttribute(self, name: str) -> None: ... def removeAttributeNS(self, namespaceURI: str, localName) -> None: ... def removeAttributeNode(self, node): ... - removeAttributeNodeNS: Any + removeAttributeNodeNS: Incomplete def hasAttribute(self, name: str) -> bool: ... def hasAttributeNS(self, namespaceURI: str, localName) -> bool: ... def getElementsByTagName(self, name: str): ... def getElementsByTagNameNS(self, namespaceURI: str, localName): ... - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def hasAttributes(self) -> bool: ... def setIdAttribute(self, name) -> None: ... def setIdAttributeNS(self, namespaceURI: str, localName) -> None: ... @@ -153,10 +154,10 @@ class Element(Node): def attributes(self) -> NamedNodeMap: ... class Childless: - attributes: Any - childNodes: Any - firstChild: Any - lastChild: Any + attributes: Incomplete + childNodes: Incomplete + firstChild: Incomplete + lastChild: Incomplete def appendChild(self, node) -> None: ... def hasChildNodes(self) -> bool: ... def insertBefore(self, newChild, refChild) -> None: ... @@ -165,21 +166,21 @@ class Childless: def replaceChild(self, newChild, oldChild) -> None: ... class ProcessingInstruction(Childless, Node): - nodeType: Any - target: Any - data: Any + nodeType: int + target: Incomplete + data: Incomplete def __init__(self, target, data) -> None: ... - nodeValue: Any - nodeName: Any - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + nodeValue: Incomplete + nodeName: Incomplete + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CharacterData(Childless, Node): - ownerDocument: Any - previousSibling: Any + ownerDocument: Incomplete + previousSibling: Incomplete def __init__(self) -> None: ... def __len__(self) -> int: ... data: str - nodeValue: Any + nodeValue: Incomplete def substringData(self, offset: int, count: int) -> str: ... def appendData(self, arg: str) -> None: ... def insertData(self, offset: int, arg: str) -> None: ... @@ -189,12 +190,12 @@ class CharacterData(Childless, Node): def length(self) -> int: ... class Text(CharacterData): - nodeType: Any + nodeType: int nodeName: str - attributes: Any - data: Any + attributes: Incomplete + data: Incomplete def splitText(self, offset): ... - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... 
+ def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def replaceWholeText(self, content): ... @property def isWhitespaceInElementContent(self) -> bool: ... @@ -202,19 +203,19 @@ class Text(CharacterData): def wholeText(self) -> str: ... class Comment(CharacterData): - nodeType: Any + nodeType: int nodeName: str def __init__(self, data) -> None: ... - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class CDATASection(Text): - nodeType: Any + nodeType: int nodeName: str - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class ReadOnlySequentialNamedNodeMap: def __init__(self, seq=...) -> None: ... - def __len__(self): ... + def __len__(self) -> int: ... def getNamedItem(self, name): ... def getNamedItemNS(self, namespaceURI: str, localName): ... def __getitem__(self, name_or_tuple): ... @@ -227,31 +228,31 @@ class ReadOnlySequentialNamedNodeMap: def length(self) -> int: ... class Identified: - publicId: Any - systemId: Any + publicId: Incomplete + systemId: Incomplete class DocumentType(Identified, Childless, Node): - nodeType: Any - nodeValue: Any - name: Any - internalSubset: Any - entities: Any - notations: Any - nodeName: Any + nodeType: int + nodeValue: Incomplete + name: Incomplete + internalSubset: Incomplete + entities: Incomplete + notations: Incomplete + nodeName: Incomplete def __init__(self, qualifiedName: str) -> None: ... def cloneNode(self, deep): ... - def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... + def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class Entity(Identified, Node): - attributes: Any - nodeType: Any - nodeValue: Any - actualEncoding: Any - encoding: Any - version: Any - nodeName: Any - notationName: Any - childNodes: Any + attributes: Incomplete + nodeType: int + nodeValue: Incomplete + actualEncoding: Incomplete + encoding: Incomplete + version: Incomplete + nodeName: Incomplete + notationName: Incomplete + childNodes: Incomplete def __init__(self, name, publicId, systemId, notation) -> None: ... def appendChild(self, newChild) -> None: ... def insertBefore(self, newChild, refChild) -> None: ... @@ -259,19 +260,19 @@ class Entity(Identified, Node): def replaceChild(self, newChild, oldChild) -> None: ... class Notation(Identified, Childless, Node): - nodeType: Any - nodeValue: Any - nodeName: Any + nodeType: int + nodeValue: Incomplete + nodeName: Incomplete def __init__(self, name, publicId, systemId) -> None: ... class DOMImplementation(DOMImplementationLS): - def hasFeature(self, feature, version) -> bool: ... - def createDocument(self, namespaceURI: str | None, qualifiedName: str | None, doctype): ... - def createDocumentType(self, qualifiedName: str | None, publicId, systemId): ... - def getInterface(self, feature): ... + def hasFeature(self, feature: str, version: str | None) -> bool: ... + def createDocument(self, namespaceURI: str | None, qualifiedName: str | None, doctype: DocumentType | None) -> Document: ... + def createDocumentType(self, qualifiedName: str | None, publicId: str, systemId: str) -> DocumentType: ... 
+ def getInterface(self, feature: str) -> Self | None: ... class ElementInfo: - tagName: Any + tagName: Incomplete def __init__(self, name) -> None: ... def getAttributeType(self, aname): ... def getAttributeTypeNS(self, namespaceURI: str, localName): ... @@ -281,34 +282,34 @@ class ElementInfo: def isIdNS(self, namespaceURI: str, localName): ... class Document(Node, DocumentLS): - implementation: Any - nodeType: Any + implementation: Incomplete + nodeType: int nodeName: str - nodeValue: Any - attributes: Any - parentNode: Any - previousSibling: Any - nextSibling: Any - actualEncoding: Any - encoding: Any - standalone: Any - version: Any + nodeValue: Incomplete + attributes: Incomplete + parentNode: Incomplete + previousSibling: Incomplete + nextSibling: Incomplete + actualEncoding: Incomplete + encoding: str | None + standalone: bool | None + version: Incomplete strictErrorChecking: bool - errorHandler: Any - documentURI: Any - doctype: Any - childNodes: Any + errorHandler: Incomplete + documentURI: Incomplete + doctype: DocumentType | None + childNodes: Incomplete def __init__(self) -> None: ... def appendChild(self, node): ... - documentElement: Any + documentElement: Incomplete def removeChild(self, oldChild): ... def unlink(self) -> None: ... def cloneNode(self, deep): ... - def createDocumentFragment(self): ... - def createElement(self, tagName: str): ... - def createTextNode(self, data): ... - def createCDATASection(self, data): ... - def createComment(self, data): ... + def createDocumentFragment(self) -> DocumentFragment: ... + def createElement(self, tagName: str) -> Element: ... + def createTextNode(self, data: str) -> Text: ... + def createCDATASection(self, data: str) -> CDATASection: ... + def createComment(self, data: str) -> Comment: ... def createProcessingInstruction(self, target, data): ... def createAttribute(self, qName) -> Attr: ... def createElementNS(self, namespaceURI: str, qualifiedName: str): ... @@ -316,21 +317,26 @@ class Document(Node, DocumentLS): def getElementById(self, id): ... def getElementsByTagName(self, name: str): ... def getElementsByTagNameNS(self, namespaceURI: str, localName): ... - def isSupported(self, feature, version): ... + def isSupported(self, feature: str, version: str | None) -> bool: ... def importNode(self, node, deep): ... if sys.version_info >= (3, 9): def writexml( self, - writer, - indent: str = ..., - addindent: str = ..., - newl: str = ..., - encoding: Any | None = ..., - standalone: Any | None = ..., + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: str | None = None, + standalone: bool | None = None, ) -> None: ... else: def writexml( - self, writer, indent: str = ..., addindent: str = ..., newl: str = ..., encoding: Any | None = ... + self, + writer: SupportsWrite[str], + indent: str = "", + addindent: str = "", + newl: str = "", + encoding: Incomplete | None = None, ) -> None: ... def renameNode(self, n, namespaceURI: str, name): ... 
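(Illustrative aside, not part of the patch.) The minidom.pyi hunk above swaps most `Any` annotations for `Incomplete`, gives the `Document.create*` factories concrete return types (`Element`, `Text`, `Comment`, ...), and types `writexml` against `SupportsWrite[str]` with literal string defaults. A short sketch that exercises those signatures; every name below is invented for the example:

```python
import io
from xml.dom.minidom import getDOMImplementation

impl = getDOMImplementation()
assert impl is not None  # stub return type is DOMImplementation | None
doc = impl.createDocument(None, "config", None)

root = doc.documentElement
entry = doc.createElement("entry")           # typed as Element
entry.appendChild(doc.createTextNode("42"))  # typed as Text
root.appendChild(entry)

buf = io.StringIO()                          # any SupportsWrite[str] is accepted
doc.writexml(buf, addindent="  ", newl="\n")
print(buf.getvalue())
```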
diff --git a/mypy/typeshed/stdlib/xml/dom/pulldom.pyi b/mypy/typeshed/stdlib/xml/dom/pulldom.pyi index 07f220ddd347..920905160e43 100644 --- a/mypy/typeshed/stdlib/xml/dom/pulldom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/pulldom.pyi @@ -1,7 +1,6 @@ import sys -from _typeshed import SupportsRead +from _typeshed import Incomplete, SupportsRead from collections.abc import Sequence -from typing import Any from typing_extensions import Literal, TypeAlias from xml.dom.minidom import Document, DOMImplementation, Element, Text from xml.sax.handler import ContentHandler @@ -36,11 +35,11 @@ _Event: TypeAlias = tuple[ class PullDOM(ContentHandler): document: Document | None documentFactory: _DocumentFactory - firstEvent: Any - lastEvent: Any - elementStack: Sequence[Any] - pending_events: Sequence[Any] - def __init__(self, documentFactory: _DocumentFactory = ...) -> None: ... + firstEvent: Incomplete + lastEvent: Incomplete + elementStack: Sequence[Incomplete] + pending_events: Sequence[Incomplete] + def __init__(self, documentFactory: _DocumentFactory = None) -> None: ... def pop(self) -> Element: ... def setDocumentLocator(self, locator) -> None: ... def startPrefixMapping(self, prefix, uri) -> None: ... @@ -68,7 +67,7 @@ class DOMEventStream: parser: XMLReader bufsize: int def __init__(self, stream: SupportsRead[bytes] | SupportsRead[str], parser: XMLReader, bufsize: int) -> None: ... - pulldom: Any + pulldom: Incomplete if sys.version_info < (3, 11): def __getitem__(self, pos): ... @@ -89,6 +88,6 @@ class SAX2DOM(PullDOM): default_bufsize: int def parse( - stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ... + stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = None, bufsize: int | None = None ) -> DOMEventStream: ... -def parseString(string: str, parser: XMLReader | None = ...) -> DOMEventStream: ... +def parseString(string: str, parser: XMLReader | None = None) -> DOMEventStream: ... diff --git a/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi b/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi index f6afd8aa2786..c07e4ba2465e 100644 --- a/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi +++ b/mypy/typeshed/stdlib/xml/dom/xmlbuilder.pyi @@ -1,3 +1,4 @@ +from _typeshed import Incomplete, Unused from typing import Any, NoReturn from typing_extensions import Literal, TypeAlias from urllib.request import OpenerDirector @@ -11,20 +12,20 @@ __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] # The same as `_DOMBuilderErrorHandlerType`? # Maybe `xml.sax.handler.ErrorHandler`? # - Return type of DOMBuilder.getFeature(). -# We could get rid of the `Any` if we knew more +# We could get rid of the `Incomplete` if we knew more # about `Options.errorHandler`. # ALIASES REPRESENTING MORE UNKNOWN TYPES: # probably the same as `Options.errorHandler`? # Maybe `xml.sax.handler.ErrorHandler`? -_DOMBuilderErrorHandlerType: TypeAlias = Any | None +_DOMBuilderErrorHandlerType: TypeAlias = Incomplete | None # probably some kind of IO... -_DOMInputSourceCharacterStreamType: TypeAlias = Any | None +_DOMInputSourceCharacterStreamType: TypeAlias = Incomplete | None # probably a string?? -_DOMInputSourceStringDataType: TypeAlias = Any | None +_DOMInputSourceStringDataType: TypeAlias = Incomplete | None # probably a string?? 
-_DOMInputSourceEncodingType: TypeAlias = Any | None +_DOMInputSourceEncodingType: TypeAlias = Incomplete | None class Options: namespaces: int @@ -55,18 +56,17 @@ class DOMBuilder: ACTION_APPEND_AS_CHILDREN: Literal[2] ACTION_INSERT_AFTER: Literal[3] ACTION_INSERT_BEFORE: Literal[4] - def __init__(self) -> None: ... def setFeature(self, name: str, state: int) -> None: ... def supportsFeature(self, name: str) -> bool: ... def canSetFeature(self, name: str, state: int) -> bool: ... # getFeature could return any attribute from an instance of `Options` - def getFeature(self, name: str) -> Any: ... + def getFeature(self, name: str) -> Incomplete: ... def parseURI(self, uri: str) -> ExpatBuilder | ExpatBuilderNS: ... def parse(self, input: DOMInputSource) -> ExpatBuilder | ExpatBuilderNS: ... # `input` and `cnode` argtypes for `parseWithContext` are unknowable # as the function does nothing with them, and always raises an exception. # But `input` is *probably* `DOMInputSource`? - def parseWithContext(self, input: object, cnode: object, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + def parseWithContext(self, input: Unused, cnode: Unused, action: Literal[1, 2, 3, 4]) -> NoReturn: ... class DOMEntityResolver: def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... @@ -86,9 +86,8 @@ class DOMBuilderFilter: FILTER_SKIP: Literal[3] FILTER_INTERRUPT: Literal[4] whatToShow: int - # The argtypes for acceptNode and startContainer appear to be irrelevant. - def acceptNode(self, element: object) -> Literal[1]: ... - def startContainer(self, element: object) -> Literal[1]: ... + def acceptNode(self, element: Unused) -> Literal[1]: ... + def startContainer(self, element: Unused) -> Literal[1]: ... class DocumentLS: async_: bool @@ -97,8 +96,8 @@ class DocumentLS: # so the argtypes of `uri` and `source` are unknowable. # `source` is *probably* `DOMInputSource`? # `uri` is *probably* a str? (see DOMBuilder.parseURI()) - def load(self, uri: object) -> NoReturn: ... - def loadXML(self, source: object) -> NoReturn: ... + def load(self, uri: Unused) -> NoReturn: ... + def loadXML(self, source: Unused) -> NoReturn: ... def saveXML(self, snode: Node | None) -> str: ... class DOMImplementationLS: diff --git a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi index 7bb78d0628ce..cbba15dd3ebe 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementInclude.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import FileDescriptorOrPath from collections.abc import Callable from xml.etree.ElementTree import Element @@ -11,17 +12,17 @@ if sys.version_info >= (3, 9): class FatalIncludeError(SyntaxError): ... -def default_loader(href: str | bytes | int, parse: str, encoding: str | None = ...) -> str | Element: ... +def default_loader(href: FileDescriptorOrPath, parse: str, encoding: str | None = None) -> str | Element: ... # TODO: loader is of type default_loader ie it takes a callable that has the # same signature as default_loader. But default_loader has a keyword argument # Which can't be represented using Callable... if sys.version_info >= (3, 9): def include( - elem: Element, loader: Callable[..., str | Element] | None = ..., base_url: str | None = ..., max_depth: int | None = ... + elem: Element, loader: Callable[..., str | Element] | None = None, base_url: str | None = None, max_depth: int | None = 6 ) -> None: ... class LimitedRecursiveIncludeError(FatalIncludeError): ... 
else: - def include(elem: Element, loader: Callable[..., str | Element] | None = ...) -> None: ... + def include(elem: Element, loader: Callable[..., str | Element] | None = None) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi b/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi index 94ce933582dd..c3f6207ea241 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementPath.pyi @@ -10,7 +10,7 @@ _Token: TypeAlias = tuple[str, str] _Next: TypeAlias = Callable[[], _Token] _Callback: TypeAlias = Callable[[_SelectorContext, list[Element]], Generator[Element, None, None]] -def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = ...) -> Generator[_Token, None, None]: ... +def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = None) -> Generator[_Token, None, None]: ... def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... def prepare_child(next: _Next, token: _Token) -> _Callback: ... def prepare_star(next: _Next, token: _Token) -> _Callback: ... @@ -28,7 +28,7 @@ class _SelectorContext: _T = TypeVar("_T") -def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... -def find(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... -def findall(elem: Element, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... -def findtext(elem: Element, path: str, default: _T | None = ..., namespaces: dict[str, str] | None = ...) -> _T | str: ... +def iterfind(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... +def find(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... +def findall(elem: Element, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... +def findtext(elem: Element, path: str, default: _T | None = None, namespaces: dict[str, str] | None = None) -> _T | str: ... diff --git a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi index 84059bc21a87..db33b2d673d7 100644 --- a/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi +++ b/mypy/typeshed/stdlib/xml/etree/ElementTree.pyi @@ -1,6 +1,6 @@ import sys from _collections_abc import dict_keys -from _typeshed import FileDescriptor, StrOrBytesPath, SupportsRead, SupportsWrite +from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence from typing import Any, TypeVar, overload from typing_extensions import Literal, SupportsIndex, TypeAlias, TypeGuard @@ -38,8 +38,8 @@ if sys.version_info >= (3, 9): __all__ += ["indent"] _T = TypeVar("_T") -_FileRead: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsRead[bytes] | SupportsRead[str] -_FileWriteC14N: TypeAlias = StrOrBytesPath | FileDescriptor | SupportsWrite[bytes] +_FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead[str] +_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes] _FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str] VERSION: str @@ -54,10 +54,10 @@ def iselement(element: object) -> TypeGuard[Element]: ... 
if sys.version_info >= (3, 8): @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = None, *, - out: None = ..., - from_file: _FileRead | None = ..., + out: None = None, + from_file: _FileRead | None = None, with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -68,10 +68,10 @@ if sys.version_info >= (3, 8): ) -> str: ... @overload def canonicalize( - xml_data: str | bytes | None = ..., + xml_data: str | ReadableBuffer | None = None, *, out: SupportsWrite[str], - from_file: _FileRead | None = ..., + from_file: _FileRead | None = None, with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -90,20 +90,20 @@ class Element: def append(self, __subelement: Element) -> None: ... def clear(self) -> None: ... def extend(self, __elements: Iterable[Element]) -> None: ... - def find(self, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... - def findall(self, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... @overload - def findtext(self, path: str, default: None = ..., namespaces: dict[str, str] | None = ...) -> str | None: ... + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @overload - def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = ...) -> _T | str: ... + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... @overload - def get(self, key: str, default: None = ...) -> str | None: ... + def get(self, key: str, default: None = None) -> str | None: ... @overload def get(self, key: str, default: _T) -> str | _T: ... def insert(self, __index: int, __subelement: Element) -> None: ... def items(self) -> ItemsView[str, str]: ... - def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... - def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def itertext(self) -> Generator[str, None, None]: ... def keys(self) -> dict_keys[str, str]: ... # makeelement returns the type of self in Python impl, but not in C impl @@ -126,17 +126,17 @@ class Element: def __setitem__(self, __s: slice, __o: Iterable[Element]) -> None: ... if sys.version_info < (3, 9): def getchildren(self) -> list[Element]: ... - def getiterator(self, tag: str | None = ...) -> list[Element]: ... + def getiterator(self, tag: str | None = None) -> list[Element]: ... def SubElement(parent: Element, tag: str, attrib: dict[str, str] = ..., **extra: str) -> Element: ... -def Comment(text: str | None = ...) -> Element: ... -def ProcessingInstruction(target: str, text: str | None = ...) -> Element: ... +def Comment(text: str | None = None) -> Element: ... +def ProcessingInstruction(target: str, text: str | None = None) -> Element: ... PI: Callable[..., Element] class QName: text: str - def __init__(self, text_or_uri: str, tag: str | None = ...) -> None: ... + def __init__(self, text_or_uri: str, tag: str | None = None) -> None: ... def __lt__(self, other: QName | str) -> bool: ... 
def __le__(self, other: QName | str) -> bool: ... def __gt__(self, other: QName | str) -> bool: ... @@ -144,29 +144,29 @@ class QName: def __eq__(self, other: object) -> bool: ... class ElementTree: - def __init__(self, element: Element | None = ..., file: _FileRead | None = ...) -> None: ... + def __init__(self, element: Element | None = None, file: _FileRead | None = None) -> None: ... def getroot(self) -> Element | Any: ... - def parse(self, source: _FileRead, parser: XMLParser | None = ...) -> Element: ... - def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... + def parse(self, source: _FileRead, parser: XMLParser | None = None) -> Element: ... + def iter(self, tag: str | None = None) -> Generator[Element, None, None]: ... if sys.version_info < (3, 9): - def getiterator(self, tag: str | None = ...) -> list[Element]: ... + def getiterator(self, tag: str | None = None) -> list[Element]: ... - def find(self, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... + def find(self, path: str, namespaces: dict[str, str] | None = None) -> Element | None: ... @overload - def findtext(self, path: str, default: None = ..., namespaces: dict[str, str] | None = ...) -> str | None: ... + def findtext(self, path: str, default: None = None, namespaces: dict[str, str] | None = None) -> str | None: ... @overload - def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = ...) -> _T | str: ... - def findall(self, path: str, namespaces: dict[str, str] | None = ...) -> list[Element]: ... - def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... + def findtext(self, path: str, default: _T, namespaces: dict[str, str] | None = None) -> _T | str: ... + def findall(self, path: str, namespaces: dict[str, str] | None = None) -> list[Element]: ... + def iterfind(self, path: str, namespaces: dict[str, str] | None = None) -> Generator[Element, None, None]: ... def write( self, file_or_filename: _FileWrite, - encoding: str | None = ..., - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - method: str | None = ..., + encoding: str | None = None, + xml_declaration: bool | None = None, + default_namespace: str | None = None, + method: str | None = None, *, - short_empty_elements: bool = ..., + short_empty_elements: bool = True, ) -> None: ... def write_c14n(self, file: _FileWriteC14N) -> None: ... @@ -176,113 +176,113 @@ if sys.version_info >= (3, 8): @overload def tostring( element: Element, - encoding: None = ..., - method: str | None = ..., + encoding: None = None, + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> bytes: ... @overload def tostring( element: Element, encoding: Literal["unicode"], - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> str: ... 
@overload def tostring( element: Element, encoding: str, - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> Any: ... @overload def tostringlist( element: Element, - encoding: None = ..., - method: str | None = ..., + encoding: None = None, + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[bytes]: ... @overload def tostringlist( element: Element, encoding: Literal["unicode"], - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[str]: ... @overload def tostringlist( element: Element, encoding: str, - method: str | None = ..., + method: str | None = None, *, - xml_declaration: bool | None = ..., - default_namespace: str | None = ..., - short_empty_elements: bool = ..., + xml_declaration: bool | None = None, + default_namespace: str | None = None, + short_empty_elements: bool = True, ) -> list[Any]: ... else: @overload def tostring( - element: Element, encoding: None = ..., method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True ) -> bytes: ... @overload def tostring( - element: Element, encoding: Literal["unicode"], method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True ) -> str: ... @overload - def tostring(element: Element, encoding: str, method: str | None = ..., *, short_empty_elements: bool = ...) -> Any: ... + def tostring(element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True) -> Any: ... @overload def tostringlist( - element: Element, encoding: None = ..., method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: None = None, method: str | None = None, *, short_empty_elements: bool = True ) -> list[bytes]: ... @overload def tostringlist( - element: Element, encoding: Literal["unicode"], method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: Literal["unicode"], method: str | None = None, *, short_empty_elements: bool = True ) -> list[str]: ... @overload def tostringlist( - element: Element, encoding: str, method: str | None = ..., *, short_empty_elements: bool = ... + element: Element, encoding: str, method: str | None = None, *, short_empty_elements: bool = True ) -> list[Any]: ... def dump(elem: Element) -> None: ... if sys.version_info >= (3, 9): - def indent(tree: Element | ElementTree, space: str = ..., level: int = ...) -> None: ... + def indent(tree: Element | ElementTree, space: str = " ", level: int = 0) -> None: ... -def parse(source: _FileRead, parser: XMLParser | None = ...) -> ElementTree: ... +def parse(source: _FileRead, parser: XMLParser | None = None) -> ElementTree: ... 
def iterparse( - source: _FileRead, events: Sequence[str] | None = ..., parser: XMLParser | None = ... + source: _FileRead, events: Sequence[str] | None = None, parser: XMLParser | None = None ) -> Iterator[tuple[str, Any]]: ... class XMLPullParser: - def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def __init__(self, events: Sequence[str] | None = None, *, _parser: XMLParser | None = None) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... # Second element in the tuple could be `Element`, `tuple[str, str]` or `None`. # Use `Any` to avoid false-positive errors. def read_events(self) -> Iterator[tuple[str, Any]]: ... -def XML(text: str | bytes, parser: XMLParser | None = ...) -> Element: ... -def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... +def XML(text: str | ReadableBuffer, parser: XMLParser | None = None) -> Element: ... +def XMLID(text: str | ReadableBuffer, parser: XMLParser | None = None) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML -def fromstringlist(sequence: Sequence[str | bytes], parser: XMLParser | None = ...) -> Element: ... +def fromstringlist(sequence: Sequence[str | ReadableBuffer], parser: XMLParser | None = None) -> Element: ... # This type is both not precise enough and too precise. The TreeBuilder # requires the elementfactory to accept tag and attrs in its args and produce @@ -313,13 +313,15 @@ class TreeBuilder: def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... def close(self) -> Element: ... - def data(self, __data: str | bytes) -> None: ... - def start(self, __tag: str | bytes, __attrs: dict[str | bytes, str | bytes]) -> Element: ... - def end(self, __tag: str | bytes) -> Element: ... + def data(self, __data: str) -> None: ... + # tag and attrs are passed to the element_factory, so they could be anything + # depending on what the particular factory supports. + def start(self, __tag: Any, __attrs: dict[Any, Any]) -> Element: ... + def end(self, __tag: str) -> Element: ... if sys.version_info >= (3, 8): # These two methods have pos-only parameters in the C implementation def comment(self, __text: str | None) -> Element: ... - def pi(self, __target: str, __text: str | None = ...) -> Element: ... + def pi(self, __target: str, __text: str | None = None) -> Element: ... if sys.version_info >= (3, 8): class C14NWriterTarget: @@ -327,13 +329,13 @@ if sys.version_info >= (3, 8): self, write: Callable[[str], object], *, - with_comments: bool = ..., - strip_text: bool = ..., - rewrite_prefixes: bool = ..., - qname_aware_tags: Iterable[str] | None = ..., - qname_aware_attrs: Iterable[str] | None = ..., - exclude_attrs: Iterable[str] | None = ..., - exclude_tags: Iterable[str] | None = ..., + with_comments: bool = False, + strip_text: bool = False, + rewrite_prefixes: bool = False, + qname_aware_tags: Iterable[str] | None = None, + qname_aware_attrs: Iterable[str] | None = None, + exclude_attrs: Iterable[str] | None = None, + exclude_tags: Iterable[str] | None = None, ) -> None: ... def data(self, data: str) -> None: ... def start_ns(self, prefix: str, uri: str) -> None: ... @@ -355,4 +357,4 @@ class XMLParser: def doctype(self, __name: str, __pubid: str, __system: str) -> None: ... def close(self) -> Any: ... - def feed(self, __data: str | bytes) -> None: ... 
+ def feed(self, __data: str | ReadableBuffer) -> None: ... diff --git a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi index cac086235cba..cebdb6a30014 100644 --- a/mypy/typeshed/stdlib/xml/parsers/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/parsers/__init__.pyi @@ -1 +1 @@ -import xml.parsers.expat as expat +from xml.parsers import expat as expat diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index af4ee052480f..ca981a00d25f 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import SupportsRead, _T_co +from _typeshed import ReadableBuffer, StrPath, SupportsRead, _T_co from collections.abc import Iterable from typing import Any, NoReturn, Protocol from xml.sax.handler import ContentHandler as ContentHandler, ErrorHandler as ErrorHandler @@ -9,13 +9,13 @@ class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): def close(self) -> None: ... class SAXException(Exception): - def __init__(self, msg: str, exception: Exception | None = ...) -> None: ... + def __init__(self, msg: str, exception: Exception | None = None) -> None: ... def getMessage(self) -> str: ... def getException(self) -> Exception: ... def __getitem__(self, ix: Any) -> NoReturn: ... class SAXParseException(SAXException): - def __init__(self, msg: str, exception: Exception, locator: Locator) -> None: ... + def __init__(self, msg: str, exception: Exception | None, locator: Locator) -> None: ... def getColumnNumber(self) -> int: ... def getLineNumber(self) -> int: ... def getPublicId(self): ... @@ -29,12 +29,19 @@ default_parser_list: list[str] if sys.version_info >= (3, 8): def make_parser(parser_list: Iterable[str] = ...) -> XMLReader: ... + def parse( + source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... else: def make_parser(parser_list: list[str] = ...) -> XMLReader: ... + def parse( + source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], + handler: ContentHandler, + errorHandler: ErrorHandler = ..., + ) -> None: ... -def parse( - source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, errorHandler: ErrorHandler = ... -) -> None: ... -def parseString(string: bytes | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... +def parseString(string: ReadableBuffer | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... diff --git a/mypy/typeshed/stdlib/xml/sax/handler.pyi b/mypy/typeshed/stdlib/xml/sax/handler.pyi index abf124f836cd..63b725bd6da6 100644 --- a/mypy/typeshed/stdlib/xml/sax/handler.pyi +++ b/mypy/typeshed/stdlib/xml/sax/handler.pyi @@ -1,14 +1,14 @@ import sys +from typing import NoReturn version: str class ErrorHandler: - def error(self, exception): ... - def fatalError(self, exception): ... - def warning(self, exception): ... + def error(self, exception: BaseException) -> NoReturn: ... + def fatalError(self, exception: BaseException) -> NoReturn: ... + def warning(self, exception: BaseException) -> None: ... class ContentHandler: - def __init__(self) -> None: ... def setDocumentLocator(self, locator): ... def startDocument(self): ... def endDocument(self): ... 
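The `xml.sax` changes above widen `parse()`/`parseString()` to accept paths and arbitrary buffers and make the `exception` argument of `SAXParseException` optional. A minimal usage sketch follows (illustrative only, not part of the patch; `TagCounter` is a made-up handler name) showing what the widened `parseString` annotation now permits:

```python
# Illustrative only, not part of the patch: with parseString annotated as
# ReadableBuffer | str, a bytearray payload type-checks as well as plain bytes.
import xml.sax
from xml.sax.handler import ContentHandler

class TagCounter(ContentHandler):  # hypothetical handler, for demonstration
    def __init__(self) -> None:
        super().__init__()
        self.count = 0

    def startElement(self, name, attrs) -> None:  # called for every opening tag
        self.count += 1

handler = TagCounter()
xml.sax.parseString(bytearray(b"<root><child/></root>"), handler)
print(handler.count)  # 2
```

Under the previous annotation only `bytes | str` was accepted, so a `bytearray` payload would have been rejected by type checkers even though the runtime handles it.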
diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi index 1361949d0c3e..67a06d2fcda2 100644 --- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi @@ -11,9 +11,9 @@ def quoteattr(data: str, entities: Mapping[str, str] = ...) -> str: ... class XMLGenerator(handler.ContentHandler): def __init__( self, - out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = ..., - encoding: str = ..., - short_empty_elements: bool = ..., + out: TextIOBase | RawIOBase | StreamWriter | StreamReaderWriter | SupportsWrite[str] | None = None, + encoding: str = "iso-8859-1", + short_empty_elements: bool = False, ) -> None: ... def startDocument(self): ... def endDocument(self): ... @@ -28,7 +28,7 @@ class XMLGenerator(handler.ContentHandler): def processingInstruction(self, target, data): ... class XMLFilterBase(xmlreader.XMLReader): - def __init__(self, parent: xmlreader.XMLReader | None = ...) -> None: ... + def __init__(self, parent: xmlreader.XMLReader | None = None) -> None: ... def error(self, exception): ... def fatalError(self, exception): ... def warning(self, exception): ... @@ -57,4 +57,4 @@ class XMLFilterBase(xmlreader.XMLReader): def getParent(self): ... def setParent(self, parent): ... -def prepare_input_source(source, base=...): ... +def prepare_input_source(source, base=""): ... diff --git a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi index d7d4db5b0a16..0bf167b04a37 100644 --- a/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi +++ b/mypy/typeshed/stdlib/xml/sax/xmlreader.pyi @@ -1,7 +1,6 @@ from collections.abc import Mapping class XMLReader: - def __init__(self) -> None: ... def parse(self, source): ... def getContentHandler(self): ... def setContentHandler(self, handler): ... @@ -18,7 +17,7 @@ class XMLReader: def setProperty(self, name, value): ... class IncrementalParser(XMLReader): - def __init__(self, bufsize: int = ...) -> None: ... + def __init__(self, bufsize: int = 65536) -> None: ... def parse(self, source): ... def feed(self, data): ... def prepareParser(self, source): ... @@ -32,7 +31,7 @@ class Locator: def getSystemId(self): ... class InputSource: - def __init__(self, system_id: str | None = ...) -> None: ... + def __init__(self, system_id: str | None = None) -> None: ... def setPublicId(self, public_id): ... def getPublicId(self): ... def setSystemId(self, system_id): ... @@ -54,11 +53,11 @@ class AttributesImpl: def getQNameByName(self, name): ... def getNames(self): ... def getQNames(self): ... - def __len__(self): ... + def __len__(self) -> int: ... def __getitem__(self, name): ... def keys(self): ... def __contains__(self, name): ... - def get(self, name, alternative=...): ... + def get(self, name, alternative=None): ... def copy(self): ... def items(self): ... def values(self): ... 
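The `saxutils` hunk above replaces `...` defaults on `XMLGenerator` with the concrete runtime values (`out=None`, `encoding="iso-8859-1"`, `short_empty_elements=False`). A small sketch, not taken from the patch, of those parameters at a call site:

```python
# A small sketch (not from the patch): XMLGenerator called with the parameters
# whose defaults the stub above now states explicitly.
from io import StringIO
from xml.sax.saxutils import XMLGenerator

buf = StringIO()
gen = XMLGenerator(buf, encoding="utf-8", short_empty_elements=True)
gen.startDocument()                       # writes the XML declaration
gen.startElement("doc", {})
gen.startElement("item", {"id": "1"})
gen.endElement("item")                    # emitted as <item id="1"/> here
gen.endElement("doc")
gen.endDocument()
print(buf.getvalue())  # <?xml ...?> followed by <doc><item id="1"/></doc>
```

With `out=None` the generator falls back to `sys.stdout` at runtime, which is why the stub keeps `None` rather than naming a concrete stream type.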
diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 7c0ba5c62fd7..7bf701ae716d 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -2,23 +2,36 @@ import gzip import http.client import sys import time -from _typeshed import Self, SupportsRead, SupportsWrite +from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite, _BufferWithLen from collections.abc import Callable, Iterable, Mapping from datetime import datetime from io import BytesIO from types import TracebackType -from typing import Any, Protocol, Union, overload -from typing_extensions import Literal, TypeAlias +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self, TypeAlias class _SupportsTimeTuple(Protocol): def timetuple(self) -> time.struct_time: ... _DateTimeComparable: TypeAlias = DateTime | datetime | str | _SupportsTimeTuple _Marshallable: TypeAlias = ( - bool | int | float | str | bytes | None | tuple[Any, ...] | list[Any] | dict[Any, Any] | datetime | DateTime | Binary + bool + | int + | float + | str + | bytes + | bytearray + | None + | tuple[_Marshallable, ...] + # Ideally we'd use _Marshallable for list and dict, but invariance makes that impractical + | list[Any] + | dict[str, Any] + | datetime + | DateTime + | Binary ) _XMLDate: TypeAlias = int | datetime | tuple[int, ...] | time.struct_time -_HostType: TypeAlias = Union[tuple[str, dict[str, str]], str] +_HostType: TypeAlias = tuple[str, dict[str, str]] | str def escape(s: str) -> str: ... # undocumented @@ -42,7 +55,6 @@ INTERNAL_ERROR: int # undocumented class Error(Exception): ... class ProtocolError(Error): - url: str errcode: int errmsg: str @@ -52,7 +64,6 @@ class ProtocolError(Error): class ResponseError(Error): ... class Fault(Error): - faultCode: int faultString: str def __init__(self, faultCode: int, faultString: str, **extra: Any) -> None: ... @@ -64,9 +75,8 @@ def _iso8601_format(value: datetime) -> str: ... # undocumented def _strftime(value: _XMLDate) -> str: ... # undocumented class DateTime: - value: str # undocumented - def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = ...) -> None: ... + def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = 0) -> None: ... def __lt__(self, other: _DateTimeComparable) -> bool: ... def __le__(self, other: _DateTimeComparable) -> bool: ... def __gt__(self, other: _DateTimeComparable) -> bool: ... @@ -81,20 +91,19 @@ def _datetime(data: Any) -> DateTime: ... # undocumented def _datetime_type(data: str) -> datetime: ... # undocumented class Binary: - data: bytes - def __init__(self, data: bytes | None = ...) -> None: ... - def decode(self, data: bytes) -> None: ... + def __init__(self, data: bytes | bytearray | None = None) -> None: ... + def decode(self, data: ReadableBuffer) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... def __eq__(self, other: object) -> bool: ... -def _binary(data: bytes) -> Binary: ... # undocumented +def _binary(data: ReadableBuffer) -> Binary: ... # undocumented WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented class ExpatParser: # undocumented def __init__(self, target: Unmarshaller) -> None: ... - def feed(self, data: str | bytes) -> None: ... + def feed(self, data: str | ReadableBuffer) -> None: ... def close(self) -> None: ... 
_WriteCallback: TypeAlias = Callable[[str], object] @@ -106,7 +115,7 @@ class Marshaller: data: None encoding: str | None allow_none: bool - def __init__(self, encoding: str | None = ..., allow_none: bool = ...) -> None: ... + def __init__(self, encoding: str | None = None, allow_none: bool = False) -> None: ... def dumps(self, values: Fault | Iterable[_Marshallable]) -> str: ... def __dump(self, value: _Marshallable, write: _WriteCallback) -> None: ... # undocumented def dump_nil(self, value: None, write: _WriteCallback) -> None: ... @@ -115,7 +124,7 @@ class Marshaller: def dump_int(self, value: int, write: _WriteCallback) -> None: ... def dump_double(self, value: float, write: _WriteCallback) -> None: ... def dump_unicode(self, value: str, write: _WriteCallback, escape: Callable[[str], str] = ...) -> None: ... - def dump_bytes(self, value: bytes, write: _WriteCallback) -> None: ... + def dump_bytes(self, value: ReadableBuffer, write: _WriteCallback) -> None: ... def dump_array(self, value: Iterable[_Marshallable], write: _WriteCallback) -> None: ... def dump_struct( self, value: Mapping[str, _Marshallable], write: _WriteCallback, escape: Callable[[str], str] = ... @@ -124,7 +133,6 @@ class Marshaller: def dump_instance(self, value: object, write: _WriteCallback) -> None: ... class Unmarshaller: - dispatch: dict[str, Callable[[Unmarshaller, str], None]] _type: str | None @@ -137,7 +145,7 @@ class Unmarshaller: append: Callable[[Any], None] _use_datetime: bool _use_builtin_types: bool - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... def close(self) -> tuple[_Marshallable, ...]: ... def getmethodname(self) -> str | None: ... def xml(self, encoding: str, standalone: Any) -> None: ... # Standalone is ignored @@ -161,7 +169,6 @@ class Unmarshaller: def end_methodName(self, data: str) -> None: ... class _MultiCallMethod: # undocumented - __call_list: list[tuple[str, tuple[_Marshallable, ...]]] __name: str def __init__(self, call_list: list[tuple[str, _Marshallable]], name: str) -> None: ... @@ -169,13 +176,11 @@ class _MultiCallMethod: # undocumented def __call__(self, *args: _Marshallable) -> None: ... class MultiCallIterator: # undocumented - results: list[list[_Marshallable]] def __init__(self, results: list[list[_Marshallable]]) -> None: ... def __getitem__(self, i: int) -> _Marshallable: ... class MultiCall: - __server: ServerProxy __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... @@ -187,26 +192,25 @@ FastMarshaller: Marshaller | None FastParser: ExpatParser | None FastUnmarshaller: Unmarshaller | None -def getparser(use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[ExpatParser, Unmarshaller]: ... +def getparser(use_datetime: bool = False, use_builtin_types: bool = False) -> tuple[ExpatParser, Unmarshaller]: ... def dumps( params: Fault | tuple[_Marshallable, ...], - methodname: str | None = ..., - methodresponse: bool | None = ..., - encoding: str | None = ..., - allow_none: bool = ..., + methodname: str | None = None, + methodresponse: bool | None = None, + encoding: str | None = None, + allow_none: bool = False, ) -> str: ... -def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[tuple[_Marshallable, ...], str | None]: ... -def gzip_encode(data: bytes) -> bytes: ... # undocumented -def gzip_decode(data: bytes, max_decode: int = ...) 
-> bytes: ... # undocumented +def loads( + data: str, use_datetime: bool = False, use_builtin_types: bool = False +) -> tuple[tuple[_Marshallable, ...], str | None]: ... +def gzip_encode(data: ReadableBuffer) -> bytes: ... # undocumented +def gzip_decode(data: ReadableBuffer, max_decode: int = 20971520) -> bytes: ... # undocumented class GzipDecodedResponse(gzip.GzipFile): # undocumented - io: BytesIO - def __init__(self, response: SupportsRead[bytes]) -> None: ... - def close(self) -> None: ... + def __init__(self, response: SupportsRead[ReadableBuffer]) -> None: ... class _Method: # undocumented - __send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable] __name: str def __init__(self, send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... @@ -214,7 +218,6 @@ class _Method: # undocumented def __call__(self, *args: _Marshallable) -> _Marshallable: ... class Transport: - user_agent: str accept_gzip_encoding: bool encode_threshold: int | None @@ -227,42 +230,46 @@ class Transport: if sys.version_info >= (3, 8): def __init__( - self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, headers: Iterable[tuple[str, str]] = ... + self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ... ) -> None: ... else: - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... - def request(self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ...) -> tuple[_Marshallable, ...]: ... + def request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False + ) -> tuple[_Marshallable, ...]: ... def single_request( - self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ... + self, host: _HostType, handler: str, request_body: _BufferWithLen, verbose: bool = False ) -> tuple[_Marshallable, ...]: ... def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... def close(self) -> None: ... - def send_request(self, host: _HostType, handler: str, request_body: bytes, debug: bool) -> http.client.HTTPConnection: ... + def send_request( + self, host: _HostType, handler: str, request_body: _BufferWithLen, debug: bool + ) -> http.client.HTTPConnection: ... def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... - def send_content(self, connection: http.client.HTTPConnection, request_body: bytes) -> None: ... + def send_content(self, connection: http.client.HTTPConnection, request_body: _BufferWithLen) -> None: ... def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): - if sys.version_info >= (3, 8): def __init__( self, - use_datetime: bool = ..., - use_builtin_types: bool = ..., + use_datetime: bool = False, + use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ..., - context: Any | None = ..., + context: Any | None = None, ) -> None: ... else: - def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, context: Any | None = ...) -> None: ... + def __init__( + self, use_datetime: bool = False, use_builtin_types: bool = False, *, context: Any | None = None + ) -> None: ... 
def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... class ServerProxy: - __host: str __handler: str __transport: Transport @@ -274,28 +281,28 @@ class ServerProxy: def __init__( self, uri: str, - transport: Transport | None = ..., - encoding: str | None = ..., - verbose: bool = ..., - allow_none: bool = ..., - use_datetime: bool = ..., - use_builtin_types: bool = ..., + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ..., - context: Any | None = ..., + context: Any | None = None, ) -> None: ... else: def __init__( self, uri: str, - transport: Transport | None = ..., - encoding: str | None = ..., - verbose: bool = ..., - allow_none: bool = ..., - use_datetime: bool = ..., - use_builtin_types: bool = ..., + transport: Transport | None = None, + encoding: str | None = None, + verbose: bool = False, + allow_none: bool = False, + use_datetime: bool = False, + use_builtin_types: bool = False, *, - context: Any | None = ..., + context: Any | None = None, ) -> None: ... def __getattr__(self, name: str) -> _Method: ... @@ -305,7 +312,7 @@ class ServerProxy: def __call__(self, attr: Literal["transport"]) -> Transport: ... @overload def __call__(self, attr: str) -> Callable[[], None] | Transport: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index e4fc300343bf..800c205513c6 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -2,14 +2,10 @@ import http.server import pydoc import socketserver from collections.abc import Callable, Iterable, Mapping -from datetime import datetime from re import Pattern from typing import Any, ClassVar, Protocol from typing_extensions import TypeAlias -from xmlrpc.client import Fault - -# TODO: Recursive type on tuple, list, dict -_Marshallable: TypeAlias = None | bool | int | float | str | bytes | tuple[Any, ...] | list[Any] | dict[Any, Any] | datetime +from xmlrpc.client import Fault, _Marshallable # The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy class _DispatchArity0(Protocol): @@ -36,26 +32,25 @@ _DispatchProtocol: TypeAlias = ( _DispatchArity0 | _DispatchArity1 | _DispatchArity2 | _DispatchArity3 | _DispatchArity4 | _DispatchArityN ) -def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = ...) -> Any: ... # undocumented +def resolve_dotted_attribute(obj: Any, attr: str, allow_dotted_names: bool = True) -> Any: ... # undocumented def list_public_methods(obj: Any) -> list[str]: ... # undocumented class SimpleXMLRPCDispatcher: # undocumented - funcs: dict[str, _DispatchProtocol] instance: Any | None allow_none: bool encoding: str use_builtin_types: bool - def __init__(self, allow_none: bool = ..., encoding: str | None = ..., use_builtin_types: bool = ...) -> None: ... - def register_instance(self, instance: Any, allow_dotted_names: bool = ...) -> None: ... - def register_function(self, function: _DispatchProtocol | None = ..., name: str | None = ...) -> Callable[..., Any]: ... + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... 
+ def register_instance(self, instance: Any, allow_dotted_names: bool = False) -> None: ... + def register_function(self, function: _DispatchProtocol | None = None, name: str | None = None) -> Callable[..., Any]: ... def register_introspection_functions(self) -> None: ... def register_multicall_functions(self) -> None: ... def _marshaled_dispatch( self, data: str, - dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = ..., - path: Any | None = ..., + dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = None, + path: Any | None = None, ) -> str: ... # undocumented def system_listMethods(self) -> list[str]: ... # undocumented def system_methodSignature(self, method_name: str) -> str: ... # undocumented @@ -72,72 +67,58 @@ class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): def do_POST(self) -> None: ... def decode_request_content(self, data: bytes) -> bytes | None: ... def report_404(self) -> None: ... - def log_request(self, code: int | str = ..., size: int | str = ...) -> None: ... class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): - - allow_reuse_address: bool _send_traceback_handler: bool def __init__( self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented - dispatchers: dict[str, SimpleXMLRPCDispatcher] - allow_none: bool - encoding: str def __init__( self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... def add_dispatcher(self, path: str, dispatcher: SimpleXMLRPCDispatcher) -> SimpleXMLRPCDispatcher: ... def get_dispatcher(self, path: str) -> SimpleXMLRPCDispatcher: ... - def _marshaled_dispatch( - self, - data: str, - dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = ..., - path: Any | None = ..., - ) -> str: ... class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): - def __init__(self, allow_none: bool = ..., encoding: str | None = ..., use_builtin_types: bool = ...) -> None: ... + def __init__(self, allow_none: bool = False, encoding: str | None = None, use_builtin_types: bool = False) -> None: ... def handle_xmlrpc(self, request_text: str) -> None: ... def handle_get(self) -> None: ... - def handle_request(self, request_text: str | None = ...) -> None: ... + def handle_request(self, request_text: str | None = None) -> None: ... class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented def docroutine( # type: ignore[override] self, object: object, name: str, - mod: str | None = ..., + mod: str | None = None, funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., - cl: type | None = ..., + cl: type | None = None, ) -> str: ... 
def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... class XMLRPCDocGenerator: # undocumented - server_name: str server_documentation: str server_title: str - def __init__(self) -> None: ... def set_server_title(self, server_title: str) -> None: ... def set_server_name(self, server_name: str) -> None: ... def set_server_documentation(self, server_documentation: str) -> None: ... @@ -151,11 +132,11 @@ class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): self, addr: tuple[str, int], requestHandler: type[SimpleXMLRPCRequestHandler] = ..., - logRequests: bool = ..., - allow_none: bool = ..., - encoding: str | None = ..., - bind_and_activate: bool = ..., - use_builtin_types: bool = ..., + logRequests: bool = True, + allow_none: bool = False, + encoding: str | None = None, + bind_and_activate: bool = True, + use_builtin_types: bool = False, ) -> None: ... class DocCGIXMLRPCRequestHandler(CGIXMLRPCRequestHandler, XMLRPCDocGenerator): diff --git a/mypy/typeshed/stdlib/zipapp.pyi b/mypy/typeshed/stdlib/zipapp.pyi index 3363161c3c6f..c7cf1704b135 100644 --- a/mypy/typeshed/stdlib/zipapp.pyi +++ b/mypy/typeshed/stdlib/zipapp.pyi @@ -11,10 +11,10 @@ class ZipAppError(ValueError): ... def create_archive( source: _Path, - target: _Path | None = ..., - interpreter: str | None = ..., - main: str | None = ..., - filter: Callable[[Path], bool] | None = ..., - compressed: bool = ..., + target: _Path | None = None, + interpreter: str | None = None, + main: str | None = None, + filter: Callable[[Path], bool] | None = None, + compressed: bool = False, ) -> None: ... def get_interpreter(archive: _Path) -> str: ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index da1710787252..b969d0cf9e6a 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -1,11 +1,11 @@ import io import sys -from _typeshed import Self, StrOrBytesPath, StrPath +from _typeshed import StrOrBytesPath, StrPath, _BufferWithLen from collections.abc import Callable, Iterable, Iterator from os import PathLike from types import TracebackType from typing import IO, Any, Protocol, overload -from typing_extensions import Literal, TypeAlias +from typing_extensions import Literal, Self, TypeAlias __all__ = [ "BadZipFile", @@ -70,7 +70,7 @@ class ZipExtFile(io.BufferedIOBase): fileobj: _ClosableZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, - pwd: bytes | None = ..., + pwd: bytes | None = None, *, close_fileobj: Literal[True], ) -> None: ... @@ -80,14 +80,14 @@ class ZipExtFile(io.BufferedIOBase): fileobj: _ZipStream, mode: _ReadWriteMode, zipinfo: ZipInfo, - pwd: bytes | None = ..., - close_fileobj: Literal[False] = ..., + pwd: bytes | None = None, + close_fileobj: Literal[False] = False, ) -> None: ... - def read(self, n: int | None = ...) -> bytes: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] - def peek(self, n: int = ...) -> bytes: ... + def read(self, n: int | None = -1) -> bytes: ... + def readline(self, limit: int = -1) -> bytes: ... # type: ignore[override] + def peek(self, n: int = 1) -> bytes: ... def read1(self, n: int | None) -> bytes: ... # type: ignore[override] - def seek(self, offset: int, whence: int = ...) -> int: ... + def seek(self, offset: int, whence: int = 0) -> int: ... class _Writer(Protocol): def write(self, __s: str) -> object: ... 
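The `ZipExtFile` lines above swap `...` for the actual runtime defaults (`read(n=-1)`, `readline(limit=-1)`, `peek(n=1)`, `seek(offset, whence=0)`). A runnable sketch, illustrative only (the archive and member name are invented), exercising those methods through an in-memory archive:

```python
# Runnable sketch, not part of the patch: exercise the ZipExtFile defaults
# spelled out above via a small in-memory archive.
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, mode="w") as zf:
    zf.writestr("notes.txt", "first line\nsecond line\n")

with zipfile.ZipFile(buf) as zf:          # mode defaults to "r"
    with zf.open("notes.txt") as member:  # member is a ZipExtFile
        print(member.peek())              # buffered bytes; position unchanged
        print(member.readline())          # b"first line\n" (limit defaults to -1)
        member.seek(0)                    # whence defaults to 0 (absolute)
        print(member.read())              # whole member again (n defaults to -1)
```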
@@ -103,54 +103,54 @@ class ZipFile: compression: int # undocumented compresslevel: int | None # undocumented mode: _ZipFileMode # undocumented - pwd: str | None # undocumented + pwd: bytes | None # undocumented if sys.version_info >= (3, 11): @overload def __init__( self, file: StrPath | IO[bytes], - mode: Literal["r"] = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: Literal["r"] = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., + strict_timestamps: bool = True, metadata_encoding: str | None, ) -> None: ... @overload def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., - metadata_encoding: None = ..., + strict_timestamps: bool = True, + metadata_encoding: None = None, ) -> None: ... elif sys.version_info >= (3, 8): def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, *, - strict_timestamps: bool = ..., + strict_timestamps: bool = True, ) -> None: ... else: def __init__( self, file: StrPath | IO[bytes], - mode: _ZipFileMode = ..., - compression: int = ..., - allowZip64: bool = ..., - compresslevel: int | None = ..., + mode: _ZipFileMode = "r", + compression: int = 0, + allowZip64: bool = True, + compresslevel: int | None = None, ) -> None: ... - def __enter__(self: Self) -> Self: ... + def __enter__(self) -> Self: ... def __exit__( self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... @@ -159,30 +159,38 @@ class ZipFile: def infolist(self) -> list[ZipInfo]: ... def namelist(self) -> list[str]: ... def open( - self, name: str | ZipInfo, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ... + self, name: str | ZipInfo, mode: _ReadWriteMode = "r", pwd: bytes | None = None, *, force_zip64: bool = False ) -> IO[bytes]: ... - def extract(self, member: str | ZipInfo, path: StrPath | None = ..., pwd: bytes | None = ...) -> str: ... + def extract(self, member: str | ZipInfo, path: StrPath | None = None, pwd: bytes | None = None) -> str: ... def extractall( - self, path: StrPath | None = ..., members: Iterable[str | ZipInfo] | None = ..., pwd: bytes | None = ... + self, path: StrPath | None = None, members: Iterable[str | ZipInfo] | None = None, pwd: bytes | None = None ) -> None: ... - def printdir(self, file: _Writer | None = ...) -> None: ... + def printdir(self, file: _Writer | None = None) -> None: ... def setpassword(self, pwd: bytes) -> None: ... - def read(self, name: str | ZipInfo, pwd: bytes | None = ...) -> bytes: ... + def read(self, name: str | ZipInfo, pwd: bytes | None = None) -> bytes: ... def testzip(self) -> str | None: ... def write( - self, filename: StrPath, arcname: StrPath | None = ..., compress_type: int | None = ..., compresslevel: int | None = ... + self, + filename: StrPath, + arcname: StrPath | None = None, + compress_type: int | None = None, + compresslevel: int | None = None, ) -> None: ... 
def writestr( - self, zinfo_or_arcname: str | ZipInfo, data: bytes | str, compress_type: int | None = ..., compresslevel: int | None = ... + self, + zinfo_or_arcname: str | ZipInfo, + data: _BufferWithLen | str, + compress_type: int | None = None, + compresslevel: int | None = None, ) -> None: ... if sys.version_info >= (3, 11): - def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = ...) -> None: ... + def mkdir(self, zinfo_or_directory_name: str | ZipInfo, mode: int = 0o777) -> None: ... class PyZipFile(ZipFile): def __init__( - self, file: str | IO[bytes], mode: _ZipFileMode = ..., compression: int = ..., allowZip64: bool = ..., optimize: int = ... + self, file: str | IO[bytes], mode: _ZipFileMode = "r", compression: int = 0, allowZip64: bool = True, optimize: int = -1 ) -> None: ... - def writepy(self, pathname: str, basename: str = ..., filterfunc: Callable[[str], bool] | None = ...) -> None: ... + def writepy(self, pathname: str, basename: str = "", filterfunc: Callable[[str], bool] | None = None) -> None: ... class ZipInfo: filename: str @@ -203,18 +211,16 @@ class ZipInfo: compress_size: int file_size: int orig_filename: str # undocumented - def __init__(self, filename: str = ..., date_time: _DateTuple = ...) -> None: ... + def __init__(self, filename: str = "NoName", date_time: _DateTuple = ...) -> None: ... if sys.version_info >= (3, 8): @classmethod - def from_file( - cls: type[Self], filename: StrPath, arcname: StrPath | None = ..., *, strict_timestamps: bool = ... - ) -> Self: ... + def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... else: @classmethod - def from_file(cls: type[Self], filename: StrPath, arcname: StrPath | None = ...) -> Self: ... + def from_file(cls, filename: StrPath, arcname: StrPath | None = None) -> Self: ... def is_dir(self) -> bool: ... - def FileHeader(self, zip64: bool | None = ...) -> bytes: ... + def FileHeader(self, zip64: bool | None = None) -> bytes: ... class _PathOpenProtocol(Protocol): def __call__(self, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... @@ -236,9 +242,11 @@ if sys.version_info >= (3, 8): @property def stem(self) -> str: ... - def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = ...) -> None: ... + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... if sys.version_info >= (3, 9): - def open(self, mode: _ReadWriteBinaryMode = ..., *args: Any, pwd: bytes | None = ..., **kwargs: Any) -> IO[bytes]: ... + def open( + self, mode: _ReadWriteBinaryMode = "r", *args: Any, pwd: bytes | None = None, **kwargs: Any + ) -> IO[bytes]: ... else: @property def open(self) -> _PathOpenProtocol: ... diff --git a/mypy/typeshed/stdlib/zipimport.pyi b/mypy/typeshed/stdlib/zipimport.pyi index db06544138ca..ee97faace379 100644 --- a/mypy/typeshed/stdlib/zipimport.pyi +++ b/mypy/typeshed/stdlib/zipimport.pyi @@ -1,9 +1,8 @@ -import os import sys +from _typeshed import StrOrBytesPath from importlib.abc import ResourceReader from importlib.machinery import ModuleSpec from types import CodeType, ModuleType -from typing import Any if sys.version_info >= (3, 8): __all__ = ["ZipImportError", "zipimporter"] @@ -13,16 +12,20 @@ class ZipImportError(ImportError): ... class zipimporter: archive: str prefix: str - def __init__(self, path: str | bytes | os.PathLike[Any]) -> None: ... - def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... 
# undocumented - def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... + if sys.version_info >= (3, 11): + def __init__(self, path: str) -> None: ... + else: + def __init__(self, path: StrOrBytesPath) -> None: ... + + def find_loader(self, fullname: str, path: str | None = None) -> tuple[zipimporter | None, list[str]]: ... # undocumented + def find_module(self, fullname: str, path: str | None = None) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... - def get_data(self, pathname: str) -> str: ... + def get_data(self, pathname: str) -> bytes: ... def get_filename(self, fullname: str) -> str: ... def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented def get_source(self, fullname: str) -> str | None: ... def is_package(self, fullname: str) -> bool: ... def load_module(self, fullname: str) -> ModuleType: ... if sys.version_info >= (3, 10): - def find_spec(self, fullname: str, target: ModuleType | None = ...) -> ModuleSpec | None: ... + def find_spec(self, fullname: str, target: ModuleType | None = None) -> ModuleSpec | None: ... def invalidate_caches(self) -> None: ... diff --git a/mypy/typeshed/stdlib/zlib.pyi b/mypy/typeshed/stdlib/zlib.pyi index cfd6784bb771..c3419af0de3f 100644 --- a/mypy/typeshed/stdlib/zlib.pyi +++ b/mypy/typeshed/stdlib/zlib.pyi @@ -1,6 +1,5 @@ import sys -from array import array -from typing import Any +from _typeshed import ReadableBuffer from typing_extensions import Literal DEFLATED: Literal[8] @@ -29,7 +28,7 @@ Z_TREES: Literal[6] class error(Exception): ... class _Compress: - def compress(self, data: bytes) -> bytes: ... + def compress(self, data: ReadableBuffer) -> bytes: ... def flush(self, mode: int = ...) -> bytes: ... def copy(self) -> _Compress: ... @@ -37,21 +36,21 @@ class _Decompress: unused_data: bytes unconsumed_tail: bytes eof: bool - def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... + def decompress(self, data: ReadableBuffer, max_length: int = ...) -> bytes: ... def flush(self, length: int = ...) -> bytes: ... def copy(self) -> _Decompress: ... -def adler32(__data: bytes, __value: int = ...) -> int: ... +def adler32(__data: ReadableBuffer, __value: int = 1) -> int: ... if sys.version_info >= (3, 11): - def compress(__data: bytes, level: int = ..., wbits: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = -1, wbits: int = 15) -> bytes: ... else: - def compress(__data: bytes, level: int = ...) -> bytes: ... + def compress(__data: ReadableBuffer, level: int = -1) -> bytes: ... def compressobj( - level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes | None = ... + level: int = -1, method: int = 8, wbits: int = 15, memLevel: int = 8, strategy: int = 0, zdict: ReadableBuffer | None = None ) -> _Compress: ... -def crc32(__data: array[Any] | bytes, __value: int = ...) -> int: ... -def decompress(__data: bytes, wbits: int = ..., bufsize: int = ...) -> bytes: ... -def decompressobj(wbits: int = ..., zdict: bytes = ...) -> _Decompress: ... +def crc32(__data: ReadableBuffer, __value: int = 0) -> int: ... +def decompress(__data: ReadableBuffer, wbits: int = 15, bufsize: int = 16384) -> bytes: ... +def decompressobj(wbits: int = 15, zdict: ReadableBuffer = b"") -> _Decompress: ... 
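The `zlib` hunk above switches the argument types from `bytes` to `ReadableBuffer` and fills in the documented defaults (`compress(level=-1)`, `crc32(__value=0)`, `adler32(__value=1)`, `decompressobj(wbits=15, zdict=b"")`). A short sketch, not part of the patch, of what the widened buffer types allow:

```python
# A short sketch (not from the patch): with ReadableBuffer, any buffer object
# is accepted by the zlib functions, not just bytes.
import zlib

payload = bytearray(b"hello " * 100)
packed = zlib.compress(memoryview(payload), level=6)  # level defaults to -1
assert zlib.decompress(packed) == payload
print(zlib.crc32(payload))    # second argument defaults to 0
print(zlib.adler32(payload))  # second argument defaults to 1
```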
diff --git a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi index 1a0760862733..fe994be3e8ff 100644 --- a/mypy/typeshed/stdlib/zoneinfo/__init__.pyi +++ b/mypy/typeshed/stdlib/zoneinfo/__init__.pyi @@ -1,7 +1,8 @@ -from _typeshed import Self, StrPath +from _typeshed import StrPath from collections.abc import Iterable, Sequence -from datetime import tzinfo +from datetime import datetime, timedelta, tzinfo from typing import Any, Protocol +from typing_extensions import Self __all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] @@ -14,16 +15,19 @@ class ZoneInfo(tzinfo): def key(self) -> str: ... def __init__(self, key: str) -> None: ... @classmethod - def no_cache(cls: type[Self], key: str) -> Self: ... + def no_cache(cls, key: str) -> Self: ... @classmethod - def from_file(cls: type[Self], __fobj: _IOBytes, key: str | None = ...) -> Self: ... + def from_file(cls, __fobj: _IOBytes, key: str | None = ...) -> Self: ... @classmethod - def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ... + def clear_cache(cls, *, only_keys: Iterable[str] | None = ...) -> None: ... + def tzname(self, __dt: datetime | None) -> str | None: ... + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + def dst(self, __dt: datetime | None) -> timedelta | None: ... # Note: Both here and in clear_cache, the types allow the use of `str` where # a sequence of strings is required. This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 -def reset_tzpath(to: Sequence[StrPath] | None = ...) -> None: ... +def reset_tzpath(to: Sequence[StrPath] | None = None) -> None: ... def available_timezones() -> set[str]: ... TZPATH: Sequence[str] diff --git a/mypy/typeshed/stubs/mypy-extensions/METADATA.toml b/mypy/typeshed/stubs/mypy-extensions/METADATA.toml index de6579f75d05..516f11f6b9e2 100644 --- a/mypy/typeshed/stubs/mypy-extensions/METADATA.toml +++ b/mypy/typeshed/stubs/mypy-extensions/METADATA.toml @@ -1,4 +1,4 @@ -version = "0.4.*" +version = "1.0.*" [tool.stubtest] ignore_missing_stub = false diff --git a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi index edefcc318176..40e24645fb77 100644 --- a/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi +++ b/mypy/typeshed/stubs/mypy-extensions/mypy_extensions.pyi @@ -1,8 +1,14 @@ +# These stubs are forked from typeshed, since we use some definitions that only make +# sense in the context of mypy/mypyc (in particular, native int types such as i64). + import abc import sys +from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import IdentityFunction, Self -from collections.abc import ItemsView, KeysView, Mapping, ValuesView -from typing import Any, ClassVar, Generic, TypeVar, overload, type_check_only +from collections.abc import Mapping +from typing import Any, ClassVar, Generic, SupportsInt, TypeVar, overload, type_check_only +from typing_extensions import Never, SupportsIndex +from _typeshed import ReadableBuffer, SupportsTrunc _T = TypeVar("_T") _U = TypeVar("_U") @@ -15,16 +21,16 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): # Unlike typing(_extensions).TypedDict, # subclasses of mypy_extensions.TypedDict do NOT have the __required_keys__ and __optional_keys__ ClassVars def copy(self: Self) -> Self: ... 
- # Using NoReturn so that only calls using mypy plugin hook that specialize the signature + # Using Never so that only calls using mypy plugin hook that specialize the signature # can go through. - def setdefault(self, k: NoReturn, default: object) -> object: ... + def setdefault(self, k: Never, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. - def pop(self, k: NoReturn, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def pop(self, k: Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] def update(self: Self, __m: Self) -> None: ... - def items(self) -> ItemsView[str, object]: ... - def keys(self) -> KeysView[str]: ... - def values(self) -> ValuesView[object]: ... - def __delitem__(self, k: NoReturn) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... if sys.version_info >= (3, 9): def __or__(self: Self, __other: Self) -> Self: ... def __ior__(self: Self, __other: Self) -> Self: ... @@ -66,3 +72,77 @@ def trait(cls: _T) -> _T: ... def mypyc_attr(*attrs: str, **kwattrs: object) -> IdentityFunction: ... class FlexibleAlias(Generic[_T, _U]): ... + +# Native int types such as i64 are magical and support implicit +# coercions to/from int using special logic in mypy. We generally only +# include operations here for which we have specialized primitives. + +class i64: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i64: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i64: ... + + def __add__(self, x: i64) -> i64: ... + def __radd__(self, x: i64) -> i64: ... + def __sub__(self, x: i64) -> i64: ... + def __rsub__(self, x: i64) -> i64: ... + def __mul__(self, x: i64) -> i64: ... + def __rmul__(self, x: i64) -> i64: ... + def __floordiv__(self, x: i64) -> i64: ... + def __rfloordiv__(self, x: i64) -> i64: ... + def __mod__(self, x: i64) -> i64: ... + def __rmod__(self, x: i64) -> i64: ... + def __and__(self, x: i64) -> i64: ... + def __rand__(self, x: i64) -> i64: ... + def __or__(self, x: i64) -> i64: ... + def __ror__(self, x: i64) -> i64: ... + def __xor__(self, x: i64) -> i64: ... + def __rxor__(self, x: i64) -> i64: ... + def __lshift__(self, x: i64) -> i64: ... + def __rlshift__(self, x: i64) -> i64: ... + def __rshift__(self, x: i64) -> i64: ... + def __rrshift__(self, x: i64) -> i64: ... + def __neg__(self) -> i64: ... + def __invert__(self) -> i64: ... + def __pos__(self) -> i64: ... + def __lt__(self, x: i64) -> bool: ... + def __le__(self, x: i64) -> bool: ... + def __ge__(self, x: i64) -> bool: ... + def __gt__(self, x: i64) -> bool: ... + def __index__(self) -> int: ... + +class i32: + @overload + def __new__(cls, __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> i32: ... + @overload + def __new__(cls, __x: str | bytes | bytearray, base: SupportsIndex) -> i32: ... + + def __add__(self, x: i32) -> i32: ... + def __radd__(self, x: i32) -> i32: ... + def __sub__(self, x: i32) -> i32: ... + def __rsub__(self, x: i32) -> i32: ... + def __mul__(self, x: i32) -> i32: ... + def __rmul__(self, x: i32) -> i32: ... + def __floordiv__(self, x: i32) -> i32: ... + def __rfloordiv__(self, x: i32) -> i32: ... + def __mod__(self, x: i32) -> i32: ... + def __rmod__(self, x: i32) -> i32: ... 
+ def __and__(self, x: i32) -> i32: ... + def __rand__(self, x: i32) -> i32: ... + def __or__(self, x: i32) -> i32: ... + def __ror__(self, x: i32) -> i32: ... + def __xor__(self, x: i32) -> i32: ... + def __rxor__(self, x: i32) -> i32: ... + def __lshift__(self, x: i32) -> i32: ... + def __rlshift__(self, x: i32) -> i32: ... + def __rshift__(self, x: i32) -> i32: ... + def __rrshift__(self, x: i32) -> i32: ... + def __neg__(self) -> i32: ... + def __invert__(self) -> i32: ... + def __pos__(self) -> i32: ... + def __lt__(self, x: i32) -> bool: ... + def __le__(self, x: i32) -> bool: ... + def __ge__(self, x: i32) -> bool: ... + def __gt__(self, x: i32) -> bool: ... + def __index__(self) -> int: ... diff --git a/mypy/typestate.py b/mypy/typestate.py index a5d65c4b4ea3..9cbad17aa7bd 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -5,12 +5,12 @@ from __future__ import annotations -from typing import ClassVar, Dict, Set, Tuple +from typing import Dict, Set, Tuple from typing_extensions import Final, TypeAlias as _TypeAlias from mypy.nodes import TypeInfo from mypy.server.trigger import make_trigger -from mypy.types import Instance, Type, get_proper_type +from mypy.types import Instance, Type, TypeVarId, get_proper_type # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship: _TypeAlias = Tuple[Instance, Instance] @@ -40,7 +40,7 @@ class TypeState: # was done in strict optional mode and of the specific *kind* of subtyping relationship, # which we represent as an arbitrary hashable tuple. # We need the caches, since subtype checks for structural types are very slow. - _subtype_caches: Final[SubtypeCache] = {} + _subtype_caches: Final[SubtypeCache] # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: @@ -53,7 +53,7 @@ class TypeState: # A blocking error will be generated in this case, since we can't proceed safely. # For the description of kinds of protocol dependencies and corresponding examples, # see _snapshot_protocol_deps. - proto_deps: ClassVar[dict[str, set[str]] | None] = {} + proto_deps: dict[str, set[str]] | None # Protocols (full names) a given class attempted to implement. # Used to calculate fine grained protocol dependencies and optimize protocol @@ -61,13 +61,13 @@ class TypeState: # of type a.A to a function expecting something compatible with protocol p.P, # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental # update. - _attempted_protocols: Final[dict[str, set[str]]] = {} + _attempted_protocols: Final[dict[str, set[str]]] # We also snapshot protocol members of the above protocols. For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like -> # instead of a wildcard to avoid unnecessarily invalidating classes. - _checked_against_members: Final[dict[str, set[str]]] = {} + _checked_against_members: Final[dict[str, set[str]]] # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all @@ -75,74 +75,78 @@ class TypeState: # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. 
appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. - _rechecked_types: Final[set[TypeInfo]] = set() + _rechecked_types: Final[set[TypeInfo]] # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. Normally, one would pass type assumptions as an additional # arguments to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming). - _assuming: Final[list[tuple[Type, Type]]] = [] - _assuming_proper: Final[list[tuple[Type, Type]]] = [] + _assuming: Final[list[tuple[Type, Type]]] + _assuming_proper: Final[list[tuple[Type, Type]]] # Ditto for inference of generic constraints against recursive type aliases. - inferring: Final[list[tuple[Type, Type]]] = [] + inferring: Final[list[tuple[Type, Type]]] # Whether to use joins or unions when solving constraints, see checkexpr.py for details. - infer_unions: ClassVar = False + infer_unions: bool # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing # via the cls parameter, since mypyc can optimize accesses to # Final attributes of a directly referenced type. - @staticmethod - def is_assumed_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming): + def __init__(self) -> None: + self._subtype_caches = {} + self.proto_deps = {} + self._attempted_protocols = {} + self._checked_against_members = {} + self._rechecked_types = set() + self._assuming = [] + self._assuming_proper = [] + self.inferring = [] + self.infer_unions = False + + def is_assumed_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def is_assumed_proper_subtype(left: Type, right: Type) -> bool: - for (l, r) in reversed(TypeState._assuming_proper): + def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: + for (l, r) in reversed(self._assuming_proper): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False - @staticmethod - def get_assumptions(is_proper: bool) -> list[tuple[Type, Type]]: + def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: if is_proper: - return TypeState._assuming_proper - return TypeState._assuming + return self._assuming_proper + return self._assuming - @staticmethod - def reset_all_subtype_caches() -> None: + def reset_all_subtype_caches(self) -> None: """Completely reset all known subtype caches.""" - TypeState._subtype_caches.clear() + self._subtype_caches.clear() - @staticmethod - def reset_subtype_caches_for(info: TypeInfo) -> None: + def reset_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" - if info in TypeState._subtype_caches: - TypeState._subtype_caches[info].clear() + if info in self._subtype_caches: + self._subtype_caches[info].clear() - @staticmethod - def reset_all_subtype_caches_for(info: TypeInfo) -> None: + def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" for item in info.mro: - TypeState.reset_subtype_caches_for(item) + self.reset_subtype_caches_for(item) - @staticmethod - def is_cached_subtype_check(kind: 
SubtypeKind, left: Instance, right: Instance) -> bool: + def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Instance) -> bool: if left.last_known_value is not None or right.last_known_value is not None: # If there is a literal last known value, give up. There # will be an unbounded number of potential types to cache, # making caching less effective. return False info = right.type - cache = TypeState._subtype_caches.get(info) + cache = self._subtype_caches.get(info) if cache is None: return False subcache = cache.get(kind) @@ -150,36 +154,32 @@ def is_cached_subtype_check(kind: SubtypeKind, left: Instance, right: Instance) return False return (left, right) in subcache - @staticmethod - def record_subtype_cache_entry(kind: SubtypeKind, left: Instance, right: Instance) -> None: + def record_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: if left.last_known_value is not None or right.last_known_value is not None: # These are unlikely to match, due to the large space of # possible values. Avoid uselessly increasing cache sizes. return - cache = TypeState._subtype_caches.setdefault(right.type, dict()) + cache = self._subtype_caches.setdefault(right.type, dict()) cache.setdefault(kind, set()).add((left, right)) - @staticmethod - def reset_protocol_deps() -> None: + def reset_protocol_deps(self) -> None: """Reset dependencies after a full run or before a daemon shutdown.""" - TypeState.proto_deps = {} - TypeState._attempted_protocols.clear() - TypeState._checked_against_members.clear() - TypeState._rechecked_types.clear() + self.proto_deps = {} + self._attempted_protocols.clear() + self._checked_against_members.clear() + self._rechecked_types.clear() - @staticmethod - def record_protocol_subtype_check(left_type: TypeInfo, right_type: TypeInfo) -> None: + def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: assert right_type.is_protocol - TypeState._rechecked_types.add(left_type) - TypeState._attempted_protocols.setdefault(left_type.fullname, set()).add( - right_type.fullname - ) - TypeState._checked_against_members.setdefault(left_type.fullname, set()).update( + self._rechecked_types.add(left_type) + self._attempted_protocols.setdefault(left_type.fullname, set()).add(right_type.fullname) + self._checked_against_members.setdefault(left_type.fullname, set()).update( right_type.protocol_members ) - @staticmethod - def _snapshot_protocol_deps() -> dict[str, set[str]]: + def _snapshot_protocol_deps(self) -> dict[str, set[str]]: """Collect protocol attribute dependencies found so far from registered subtype checks. There are three kinds of protocol dependencies. For example, after a subtype check: @@ -209,8 +209,8 @@ def __iter__(self) -> Iterator[int]: 'subtypes.is_protocol_implementation'). """ deps: dict[str, set[str]] = {} - for info in TypeState._rechecked_types: - for attr in TypeState._checked_against_members[info.fullname]: + for info in self._rechecked_types: + for attr in self._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of # a concrete class may not be reprocessed, so not all -> deps # are added. 
@@ -220,7 +220,7 @@ def __iter__(self) -> Iterator[int]:
                         # TODO: avoid everything from typeshed
                         continue
                     deps.setdefault(trigger, set()).add(make_trigger(info.fullname))
-            for proto in TypeState._attempted_protocols[info.fullname]:
+            for proto in self._attempted_protocols[info.fullname]:
                 trigger = make_trigger(info.fullname)
                 if "typing" in trigger or "builtins" in trigger:
                     continue
@@ -233,45 +233,45 @@ def __iter__(self) -> Iterator[int]:
                 deps.setdefault(trigger, set()).add(proto)
         return deps

-    @staticmethod
-    def update_protocol_deps(second_map: dict[str, set[str]] | None = None) -> None:
+    def update_protocol_deps(self, second_map: dict[str, set[str]] | None = None) -> None:
         """Update global protocol dependency map.

         We update the global map incrementally, using a snapshot only from recently
         type checked types. If second_map is given, update it as well. This is currently
         used by FineGrainedBuildManager that maintains normal (non-protocol) dependencies.
         """
-        assert (
-            TypeState.proto_deps is not None
-        ), "This should not be called after failed cache load"
-        new_deps = TypeState._snapshot_protocol_deps()
+        assert self.proto_deps is not None, "This should not be called after failed cache load"
+        new_deps = self._snapshot_protocol_deps()
         for trigger, targets in new_deps.items():
-            TypeState.proto_deps.setdefault(trigger, set()).update(targets)
+            self.proto_deps.setdefault(trigger, set()).update(targets)
         if second_map is not None:
             for trigger, targets in new_deps.items():
                 second_map.setdefault(trigger, set()).update(targets)
-        TypeState._rechecked_types.clear()
-        TypeState._attempted_protocols.clear()
-        TypeState._checked_against_members.clear()
+        self._rechecked_types.clear()
+        self._attempted_protocols.clear()
+        self._checked_against_members.clear()

-    @staticmethod
-    def add_all_protocol_deps(deps: dict[str, set[str]]) -> None:
+    def add_all_protocol_deps(self, deps: dict[str, set[str]]) -> None:
         """Add all known protocol dependencies to deps.

         This is used by tests and debug output, and also when collecting all
         collected or loaded dependencies as part of build.
         """
-        TypeState.update_protocol_deps()  # just in case
-        if TypeState.proto_deps is not None:
-            for trigger, targets in TypeState.proto_deps.items():
+        self.update_protocol_deps()  # just in case
+        if self.proto_deps is not None:
+            for trigger, targets in self.proto_deps.items():
                 deps.setdefault(trigger, set()).update(targets)


+type_state: Final = TypeState()
+
+
 def reset_global_state() -> None:
     """Reset most existing global state.

     Currently most of it is in this module. A few exceptions are strict optional status
     and functools.lru_cache.
""" - TypeState.reset_all_subtype_caches() - TypeState.reset_protocol_deps() + type_state.reset_all_subtype_caches() + type_state.reset_protocol_deps() + TypeVarId.next_raw_id = 1 diff --git a/mypy/typetraverser.py b/mypy/typetraverser.py index afe77efff78d..d9ab54871f4a 100644 --- a/mypy/typetraverser.py +++ b/mypy/typetraverser.py @@ -20,7 +20,6 @@ PartialType, PlaceholderType, RawExpressionType, - StarType, SyntheticTypeVisitor, TupleType, Type, @@ -115,9 +114,6 @@ def visit_unbound_type(self, t: UnboundType) -> None: def visit_type_list(self, t: TypeList) -> None: self.traverse_types(t.items) - def visit_star_type(self, t: StarType) -> None: - t.type.accept(self) - def visit_ellipsis_type(self, t: EllipsisType) -> None: pass @@ -131,6 +127,9 @@ def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_type_alias_type(self, t: TypeAliasType) -> None: + # TODO: sometimes we want to traverse target as well + # We need to find a way to indicate explicitly the intent, + # maybe make this method abstract (like for TypeTranslator)? self.traverse_types(t.args) def visit_unpack_type(self, t: UnpackType) -> None: diff --git a/mypy/typevars.py b/mypy/typevars.py index 9c813550d5ea..69c2eed37fa4 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -39,7 +39,15 @@ def fill_typevars(typ: TypeInfo) -> Instance | TupleType: ) elif isinstance(tv, TypeVarTupleType): tv = UnpackType( - TypeVarTupleType(tv.name, tv.fullname, tv.id, tv.upper_bound, line=-1, column=-1) + TypeVarTupleType( + tv.name, + tv.fullname, + tv.id, + tv.upper_bound, + tv.tuple_fallback, + line=-1, + column=-1, + ) ) else: assert isinstance(tv, ParamSpecType) diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index a63ebf3bfe08..29b85dae72eb 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -4,7 +4,8 @@ from typing import Sequence, TypeVar -from mypy.types import Instance, ProperType, Type, UnpackType, get_proper_type +from mypy.nodes import ARG_POS, ARG_STAR +from mypy.types import CallableType, Instance, ProperType, Type, UnpackType, get_proper_type def find_unpack_in_list(items: Sequence[Type]) -> int | None: @@ -44,6 +45,133 @@ def split_with_instance( ) +def split_with_mapped_and_template( + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, +) -> tuple[ + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], +] | None: + split_result = fully_split_with_mapped_and_template( + mapped, + mapped_prefix_len, + mapped_suffix_len, + template, + template_prefix_len, + template_suffix_len, + ) + if split_result is None: + return None + + ( + mapped_prefix, + mapped_middle_prefix, + mapped_middle_middle, + mapped_middle_suffix, + mapped_suffix, + template_prefix, + template_middle_prefix, + template_middle_middle, + template_middle_suffix, + template_suffix, + ) = split_result + + return ( + mapped_prefix + mapped_middle_prefix, + mapped_middle_middle, + mapped_middle_suffix + mapped_suffix, + template_prefix + template_middle_prefix, + template_middle_middle, + template_middle_suffix + template_suffix, + ) + + +def fully_split_with_mapped_and_template( + mapped: tuple[Type, ...], + mapped_prefix_len: int | None, + mapped_suffix_len: int | None, + template: tuple[Type, ...], + template_prefix_len: int, + template_suffix_len: int, +) -> tuple[ + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, 
...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], +] | None: + if mapped_prefix_len is not None: + assert mapped_suffix_len is not None + mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix( + tuple(mapped), mapped_prefix_len, mapped_suffix_len + ) + else: + mapped_prefix = tuple() + mapped_suffix = tuple() + mapped_middle = mapped + + template_prefix, template_middle, template_suffix = split_with_prefix_and_suffix( + tuple(template), template_prefix_len, template_suffix_len + ) + + unpack_prefix = find_unpack_in_list(template_middle) + if unpack_prefix is None: + return ( + mapped_prefix, + (), + mapped_middle, + (), + mapped_suffix, + template_prefix, + (), + template_middle, + (), + template_suffix, + ) + + unpack_suffix = len(template_middle) - unpack_prefix - 1 + # mapped_middle is too short to do the unpack + if unpack_prefix + unpack_suffix > len(mapped_middle): + return None + + ( + mapped_middle_prefix, + mapped_middle_middle, + mapped_middle_suffix, + ) = split_with_prefix_and_suffix(mapped_middle, unpack_prefix, unpack_suffix) + ( + template_middle_prefix, + template_middle_middle, + template_middle_suffix, + ) = split_with_prefix_and_suffix(template_middle, unpack_prefix, unpack_suffix) + + return ( + mapped_prefix, + mapped_middle_prefix, + mapped_middle_middle, + mapped_middle_suffix, + mapped_suffix, + template_prefix, + template_middle_prefix, + template_middle_middle, + template_middle_suffix, + template_suffix, + ) + + def extract_unpack(types: Sequence[Type]) -> ProperType | None: """Given a list of types, extracts either a single type from an unpack, or returns None.""" if len(types) == 1: @@ -51,3 +179,20 @@ def extract_unpack(types: Sequence[Type]) -> ProperType | None: if isinstance(proper_type, UnpackType): return get_proper_type(proper_type.type) return None + + +def replace_starargs(callable: CallableType, types: list[Type]) -> CallableType: + star_index = callable.arg_kinds.index(ARG_STAR) + arg_kinds = ( + callable.arg_kinds[:star_index] + + [ARG_POS] * len(types) + + callable.arg_kinds[star_index + 1 :] + ) + arg_names = ( + callable.arg_names[:star_index] + + [None] * len(types) + + callable.arg_names[star_index + 1 :] + ) + arg_types = callable.arg_types[:star_index] + types + callable.arg_types[star_index + 1 :] + + return callable.copy_modified(arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds) diff --git a/mypy/util.py b/mypy/util.py index 686a71c4331b..2c225c7fe651 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -25,11 +25,14 @@ T = TypeVar("T") -with importlib_resources.path( - "mypy", # mypy-c doesn't support __package__ - "py.typed", # a marker file for type information, we assume typeshed to live in the same dir -) as _resource: - TYPESHED_DIR: Final = str(_resource.parent / "typeshed") +if sys.version_info >= (3, 9): + TYPESHED_DIR: Final = str(importlib_resources.files("mypy") / "typeshed") +else: + with importlib_resources.path( + "mypy", # mypy-c doesn't support __package__ + "py.typed", # a marker file for type information, we assume typeshed to live in the same dir + ) as _resource: + TYPESHED_DIR = str(_resource.parent / "typeshed") ENCODING_RE: Final = re.compile(rb"([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)") @@ -516,24 +519,33 @@ def parse_gray_color(cup: bytes) -> str: return gray +def should_force_color() -> bool: + env_var = os.getenv("MYPY_FORCE_COLOR", os.getenv("FORCE_COLOR", "0")) + try: 
+ return bool(int(env_var)) + except ValueError: + return bool(env_var) + + class FancyFormatter: """Apply color and bold font to terminal output. This currently only works on Linux and Mac. """ - def __init__(self, f_out: IO[str], f_err: IO[str], show_error_codes: bool) -> None: - self.show_error_codes = show_error_codes + def __init__(self, f_out: IO[str], f_err: IO[str], hide_error_codes: bool) -> None: + self.hide_error_codes = hide_error_codes # Check if we are in a human-facing terminal on a supported platform. - if sys.platform not in ("linux", "darwin", "win32"): + if sys.platform not in ("linux", "darwin", "win32", "emscripten"): self.dummy_term = True return - force_color = int(os.getenv("MYPY_FORCE_COLOR", "0")) - if not force_color and (not f_out.isatty() or not f_err.isatty()): + if not should_force_color() and (not f_out.isatty() or not f_err.isatty()): self.dummy_term = True return if sys.platform == "win32": self.dummy_term = not self.initialize_win_colors() + elif sys.platform == "emscripten": + self.dummy_term = not self.initialize_vt100_colors() else: self.dummy_term = not self.initialize_unix_colors() if not self.dummy_term: @@ -545,6 +557,20 @@ def __init__(self, f_out: IO[str], f_err: IO[str], show_error_codes: bool) -> No "none": "", } + def initialize_vt100_colors(self) -> bool: + """Return True if initialization was successful and we can use colors, False otherwise""" + # Windows and Emscripten can both use ANSI/VT100 escape sequences for color + assert sys.platform in ("win32", "emscripten") + self.BOLD = "\033[1m" + self.UNDER = "\033[4m" + self.BLUE = "\033[94m" + self.GREEN = "\033[92m" + self.RED = "\033[91m" + self.YELLOW = "\033[93m" + self.NORMAL = "\033[0m" + self.DIM = "\033[2m" + return True + def initialize_win_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" # Windows ANSI escape sequences are only supported on Threshold 2 and above. 
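For context, a minimal sketch of the environment-variable handling introduced by the should_force_color() helper above: numeric strings are parsed as integers, and anything non-numeric falls back to plain truthiness, so values like "yes" or "true" also force color. The function name _interpret_force_color is invented here for illustration and is not part of mypy.

# Illustrative sketch only (not part of the patch): mirrors the logic of
# should_force_color() for a single environment-variable value.
def _interpret_force_color(env_var: str) -> bool:
    try:
        return bool(int(env_var))   # "0" -> False, "1" -> True
    except ValueError:
        return bool(env_var)        # "" -> False, "yes"/"true" -> True


assert _interpret_force_color("0") is False
assert _interpret_force_color("1") is True
assert _interpret_force_color("") is False
assert _interpret_force_color("yes") is True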
@@ -571,14 +597,7 @@ def initialize_win_colors(self) -> bool: | ENABLE_WRAP_AT_EOL_OUTPUT | ENABLE_VIRTUAL_TERMINAL_PROCESSING, ) - self.BOLD = "\033[1m" - self.UNDER = "\033[4m" - self.BLUE = "\033[94m" - self.GREEN = "\033[92m" - self.RED = "\033[91m" - self.YELLOW = "\033[93m" - self.NORMAL = "\033[0m" - self.DIM = "\033[2m" + self.initialize_vt100_colors() return True return False @@ -681,7 +700,7 @@ def colorize(self, error: str) -> str: """Colorize an output line by highlighting the status and error code.""" if ": error:" in error: loc, msg = error.split("error:", maxsplit=1) - if not self.show_error_codes: + if self.hide_error_codes: return ( loc + self.style("error:", "red", bold=True) + self.highlight_quote_groups(msg) ) @@ -769,9 +788,10 @@ def format_error( return self.style(msg, "red", bold=True) -def is_typeshed_file(file: str) -> bool: +def is_typeshed_file(typeshed_dir: str | None, file: str) -> bool: + typeshed_dir = typeshed_dir if typeshed_dir is not None else TYPESHED_DIR try: - return os.path.commonpath((TYPESHED_DIR, os.path.abspath(file))) == TYPESHED_DIR + return os.path.commonpath((typeshed_dir, os.path.abspath(file))) == typeshed_dir except ValueError: # Different drives on Windows return False @@ -787,18 +807,16 @@ def unnamed_function(name: str | None) -> bool: return name is not None and name == "_" -# TODO: replace with uses of perf_counter_ns when support for py3.6 is dropped -# (or when mypy properly handles alternate definitions based on python version check -time_ref = time.perf_counter +time_ref = time.perf_counter_ns -def time_spent_us(t0: float) -> int: - return int((time.perf_counter() - t0) * 1e6) +def time_spent_us(t0: int) -> int: + return int((time.perf_counter_ns() - t0) / 1000) def plural_s(s: int | Sized) -> str: count = s if isinstance(s, int) else len(s) - if count > 1: + if count != 1: return "s" else: return "" diff --git a/mypy/version.py b/mypy/version.py index e0dc42b478f8..4d32a1d18dc8 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -5,10 +5,10 @@ from mypy import git # Base version. -# - Release versions have the form "0.NNN". -# - Dev versions have the form "0.NNN+dev" (PLUS sign to conform to PEP 440). -# - For 1.0 we'll switch back to 1.2.3 form. -__version__ = "0.980+dev" +# - Release versions have the form "1.2.3". +# - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). +# - Before 1.0 we had the form "0.NNN". +__version__ = "1.1.1" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) diff --git a/mypy/visitor.py b/mypy/visitor.py index 62e7b4f90c8e..c5aa3caa8295 100644 --- a/mypy/visitor.py +++ b/mypy/visitor.py @@ -355,7 +355,8 @@ class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T], Pattern methods. As all methods defined here return None by default, subclasses do not always need to override all the methods. - TODO make the default return value explicit + TODO: make the default return value explicit, then turn on + empty body checking in mypy_self_check.ini. """ # Not in superclasses: diff --git a/mypy_bootstrap.ini b/mypy_bootstrap.ini index 3a6eee6449d2..c680990fbd9e 100644 --- a/mypy_bootstrap.ini +++ b/mypy_bootstrap.ini @@ -13,3 +13,7 @@ warn_redundant_casts = True warn_unused_configs = True show_traceback = True always_true = MYPYC + +[mypy-mypy.visitor] +# See docstring for NodeVisitor for motivation. 
+disable_error_code = empty-body diff --git a/mypy_self_check.ini b/mypy_self_check.ini index 5dc497528fab..d20fcd60a9cb 100644 --- a/mypy_self_check.ini +++ b/mypy_self_check.ini @@ -1,30 +1,15 @@ [mypy] -warn_unused_configs = True -disallow_any_generics = True -disallow_subclassing_any = True -disallow_untyped_calls = True -disallow_untyped_defs = True -disallow_incomplete_defs = True -check_untyped_defs = True -disallow_untyped_decorators = True -no_implicit_optional = True -warn_redundant_casts = True -warn_unused_ignores = True -no_implicit_reexport = True -strict_equality = True -strict_concatenate = True - -; This is the only setting in --strict that we don't have enabled -warn_return_any = False - -warn_no_return = True -strict_optional = True +strict = True disallow_any_unimported = True show_traceback = True -show_error_codes = True pretty = True always_false = MYPYC plugins = misc/proper_plugin.py python_version = 3.7 exclude = mypy/typeshed/|mypyc/test-data/|mypyc/lib-rt/ +enable_error_code = ignore-without-code,redundant-expr + +[mypy-mypy.visitor] +# See docstring for NodeVisitor for motivation. +disable_error_code = empty-body diff --git a/mypyc/analysis/attrdefined.py b/mypyc/analysis/attrdefined.py index 170c0029ba04..02e02a82a4f9 100644 --- a/mypyc/analysis/attrdefined.py +++ b/mypyc/analysis/attrdefined.py @@ -120,6 +120,11 @@ def analyze_always_defined_attrs(class_irs: list[ClassIR]) -> None: for cl in class_irs: update_always_defined_attrs_using_subclasses(cl, seen) + # Final pass: detect attributes that need to use a bitmap to track definedness + seen = set() + for cl in class_irs: + detect_undefined_bitmap(cl, seen) + def analyze_always_defined_attrs_in_class(cl: ClassIR, seen: set[ClassIR]) -> None: if cl in seen: @@ -407,3 +412,26 @@ def update_always_defined_attrs_using_subclasses(cl: ClassIR, seen: set[ClassIR] removed.add(attr) cl._always_initialized_attrs -= removed seen.add(cl) + + +def detect_undefined_bitmap(cl: ClassIR, seen: Set[ClassIR]) -> None: + if cl.is_trait: + return + + if cl in seen: + return + seen.add(cl) + for base in cl.base_mro[1:]: + detect_undefined_bitmap(cl, seen) + + if len(cl.base_mro) > 1: + cl.bitmap_attrs.extend(cl.base_mro[1].bitmap_attrs) + for n, t in cl.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n): + cl.bitmap_attrs.append(n) + + for base in cl.mro[1:]: + if base.is_trait: + for n, t in base.attributes.items(): + if t.error_overlap and not cl.is_always_defined(n) and n not in cl.bitmap_attrs: + cl.bitmap_attrs.append(n) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index 824d64a1bf4b..21c4da8981d1 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -68,7 +68,7 @@ def __init__( def __str__(self) -> str: lines = [] - lines.append("exits: %s" % sorted(self.exits, key=lambda e: e.label)) + lines.append("exits: %s" % sorted(self.exits, key=lambda e: int(e.label))) lines.append("succ: %s" % self.succ) lines.append("pred: %s" % self.pred) return "\n".join(lines) diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index c2cdd073f62e..719faebfcee8 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -129,7 +129,11 @@ def check_op_sources_valid(fn: FuncIR) -> list[FnError]: for block in fn.blocks: valid_ops.update(block.ops) - valid_registers.update([op.dest for op in block.ops if isinstance(op, BaseAssign)]) + for op in block.ops: + if isinstance(op, BaseAssign): + valid_registers.add(op.dest) + elif isinstance(op, LoadAddress) 
and isinstance(op.src, Register): + valid_registers.add(op.src) valid_registers.update(fn.arg_regs) @@ -150,7 +154,7 @@ def check_op_sources_valid(fn: FuncIR) -> list[FnError]: if source not in valid_registers: errors.append( FnError( - source=op, desc=f"Invalid op reference to register {source.name}" + source=op, desc=f"Invalid op reference to register {source.name!r}" ) ) @@ -213,6 +217,10 @@ def check_type_coercion(self, op: Op, src: RType, dest: RType) -> None: source=op, desc=f"Cannot coerce source type {src.name} to dest type {dest.name}" ) + def check_compatibility(self, op: Op, t: RType, s: RType) -> None: + if not can_coerce_to(t, s) or not can_coerce_to(s, t): + self.fail(source=op, desc=f"{t.name} and {s.name} are not compatible") + def visit_goto(self, op: Goto) -> None: self.check_control_op_targets(op) @@ -248,6 +256,15 @@ def check_tuple_items_valid_literals(self, op: LoadLiteral, t: tuple[object, ... if isinstance(x, tuple): self.check_tuple_items_valid_literals(op, x) + def check_frozenset_items_valid_literals(self, op: LoadLiteral, s: frozenset[object]) -> None: + for x in s: + if x is None or isinstance(x, (str, bytes, bool, int, float, complex)): + pass + elif isinstance(x, tuple): + self.check_tuple_items_valid_literals(op, x) + else: + self.fail(op, f"Invalid type for item of frozenset literal: {type(x)})") + def visit_load_literal(self, op: LoadLiteral) -> None: expected_type = None if op.value is None: @@ -267,6 +284,11 @@ def visit_load_literal(self, op: LoadLiteral) -> None: elif isinstance(op.value, tuple): expected_type = "builtins.tuple" self.check_tuple_items_valid_literals(op, op.value) + elif isinstance(op.value, frozenset): + # There's no frozenset_rprimitive type since it'd be pretty useless so we just pretend + # it's a set (when it's really a frozenset). + expected_type = "builtins.set" + self.check_frozenset_items_valid_literals(op, op.value) assert expected_type is not None, "Missed a case for LoadLiteral check" @@ -357,7 +379,7 @@ def visit_int_op(self, op: IntOp) -> None: pass def visit_comparison_op(self, op: ComparisonOp) -> None: - pass + self.check_compatibility(op, op.lhs.type, op.rhs.type) def visit_load_mem(self, op: LoadMem) -> None: pass diff --git a/mypyc/build.py b/mypyc/build.py index db548b149946..8e1ee8078c11 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -25,7 +25,7 @@ import re import sys import time -from typing import TYPE_CHECKING, Any, Dict, Iterable, NoReturn, cast +from typing import TYPE_CHECKING, Any, Dict, Iterable, NoReturn, Union, cast from mypy.build import BuildSource from mypy.errors import CompileError @@ -41,11 +41,17 @@ from mypyc.options import CompilerOptions if TYPE_CHECKING: - from distutils.core import Extension + from distutils.core import Extension as _distutils_Extension + from typing_extensions import TypeAlias + + from setuptools import Extension as _setuptools_Extension + + Extension: TypeAlias = Union[_setuptools_Extension, _distutils_Extension] + try: # Import setuptools so that it monkey-patch overrides distutils - import setuptools # noqa: F401 + import setuptools except ImportError: if sys.version_info >= (3, 12): # Raise on Python 3.12, since distutils will go away forever @@ -57,13 +63,16 @@ def get_extension() -> type[Extension]: # We can work with either setuptools or distutils, and pick setuptools # if it has been imported. 
use_setuptools = "setuptools" in sys.modules + extension_class: type[Extension] if not use_setuptools: - from distutils.core import Extension + import distutils.core + + extension_class = distutils.core.Extension else: - from setuptools import Extension + extension_class = setuptools.Extension - return Extension + return extension_class def setup_mypycify_vars() -> None: @@ -85,6 +94,15 @@ def fail(message: str) -> NoReturn: sys.exit(message) +def emit_messages(options: Options, messages: list[str], dt: float, serious: bool = False) -> None: + # ... you know, just in case. + if options.junit_xml: + py_version = f"{options.python_version[0]}_{options.python_version[1]}" + write_junit_xml(dt, serious, messages, options.junit_xml, py_version, options.platform) + if messages: + print("\n".join(messages)) + + def get_mypy_config( mypy_options: list[str], only_compile_paths: Iterable[str] | None, @@ -191,47 +209,35 @@ def generate_c( """ t0 = time.time() - # Do the actual work now - serious = False - result = None try: result = emitmodule.parse_and_typecheck( sources, options, compiler_options, groups, fscache ) - messages = result.errors except CompileError as e: - messages = e.messages - if not e.use_stdout: - serious = True + emit_messages(options, e.messages, time.time() - t0, serious=(not e.use_stdout)) + sys.exit(1) t1 = time.time() + if result.errors: + emit_messages(options, result.errors, t1 - t0) + sys.exit(1) + if compiler_options.verbose: print(f"Parsed and typechecked in {t1 - t0:.3f}s") - if not messages and result: - errors = Errors() - modules, ctext = emitmodule.compile_modules_to_c( - result, compiler_options=compiler_options, errors=errors, groups=groups - ) - - if errors.num_errors: - messages.extend(errors.new_messages()) - + errors = Errors() + modules, ctext = emitmodule.compile_modules_to_c( + result, compiler_options=compiler_options, errors=errors, groups=groups + ) t2 = time.time() + emit_messages(options, errors.new_messages(), t2 - t1) + if errors.num_errors: + # No need to stop the build if only warnings were emitted. + sys.exit(1) + if compiler_options.verbose: print(f"Compiled to C in {t2 - t1:.3f}s") - # ... you know, just in case. - if options.junit_xml: - py_version = f"{options.python_version[0]}_{options.python_version[1]}" - write_junit_xml( - t2 - t0, serious, messages, options.junit_xml, py_version, options.platform - ) - - if messages: - print("\n".join(messages)) - sys.exit(1) - return ctext, "\n".join(format_modules(modules)) @@ -533,10 +539,12 @@ def mypycify( "-Wno-unused-variable", "-Wno-unused-command-line-argument", "-Wno-unknown-warning-option", + "-Wno-unused-but-set-variable", + "-Wno-ignored-optimization-argument", + # Disables C Preprocessor (cpp) warnings + # See https://github.com/mypyc/mypyc/issues/956 + "-Wno-cpp", ] - if "gcc" in compiler.compiler[0] or "gnu-cc" in compiler.compiler[0]: - # This flag is needed for gcc but does not exist on clang. 
- cflags += ["-Wno-unused-but-set-variable"] elif compiler.compiler_type == "msvc": # msvc doesn't have levels, '/O2' is full and '/Od' is disable if opt_level == "0": diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 3fd48dcd1cb8..6e0c89dd0ecf 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -9,6 +9,7 @@ from mypyc.codegen.literals import Literals from mypyc.common import ( ATTR_PREFIX, + BITMAP_BITS, FAST_ISINSTANCE_MAX_SUBCLASSES, NATIVE_PREFIX, REG_PREFIX, @@ -329,21 +330,84 @@ def tuple_c_declaration(self, rtuple: RTuple) -> list[str]: return result + def bitmap_field(self, index: int) -> str: + """Return C field name used for attribute bitmap.""" + n = index // BITMAP_BITS + if n == 0: + return "bitmap" + return f"bitmap{n + 1}" + + def attr_bitmap_expr(self, obj: str, cl: ClassIR, index: int) -> str: + """Return reference to the attribute definedness bitmap.""" + cast = f"({cl.struct_name(self.names)} *)" + attr = self.bitmap_field(index) + return f"({cast}{obj})->{attr}" + + def emit_attr_bitmap_set( + self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str + ) -> None: + """Mark an attribute as defined in the attribute bitmap. + + Assumes that the attribute is tracked in the bitmap (only some attributes + use the bitmap). If 'value' is not equal to the error value, do nothing. + """ + self._emit_attr_bitmap_update(value, obj, rtype, cl, attr, clear=False) + + def emit_attr_bitmap_clear(self, obj: str, rtype: RType, cl: ClassIR, attr: str) -> None: + """Mark an attribute as undefined in the attribute bitmap. + + Unlike emit_attr_bitmap_set, clear unconditionally. + """ + self._emit_attr_bitmap_update("", obj, rtype, cl, attr, clear=True) + + def _emit_attr_bitmap_update( + self, value: str, obj: str, rtype: RType, cl: ClassIR, attr: str, clear: bool + ) -> None: + if value: + check = self.error_value_check(rtype, value, "==") + self.emit_line(f"if (unlikely({check})) {{") + index = cl.bitmap_attrs.index(attr) + mask = 1 << (index & (BITMAP_BITS - 1)) + bitmap = self.attr_bitmap_expr(obj, cl, index) + if clear: + self.emit_line(f"{bitmap} &= ~{mask};") + else: + self.emit_line(f"{bitmap} |= {mask};") + if value: + self.emit_line("}") + def use_vectorcall(self) -> bool: return use_vectorcall(self.capi_version) def emit_undefined_attr_check( - self, rtype: RType, attr_expr: str, compare: str, unlikely: bool = False + self, + rtype: RType, + attr_expr: str, + compare: str, + obj: str, + attr: str, + cl: ClassIR, + *, + unlikely: bool = False, ) -> None: + check = self.error_value_check(rtype, attr_expr, compare) + if unlikely: + check = f"unlikely({check})" + if rtype.error_overlap: + index = cl.bitmap_attrs.index(attr) + bit = 1 << (index & (BITMAP_BITS - 1)) + attr = self.bitmap_field(index) + obj_expr = f"({cl.struct_name(self.names)} *){obj}" + check = f"{check} && !(({obj_expr})->{attr} & {bit})" + self.emit_line(f"if ({check}) {{") + + def error_value_check(self, rtype: RType, value: str, compare: str) -> str: if isinstance(rtype, RTuple): - check = "({})".format( - self.tuple_undefined_check_cond(rtype, attr_expr, self.c_undefined_value, compare) + return self.tuple_undefined_check_cond( + rtype, value, self.c_error_value, compare, check_exception=False ) else: - check = f"({attr_expr} {compare} {self.c_undefined_value(rtype)})" - if unlikely: - check = f"(unlikely{check})" - self.emit_line(f"if {check} {{") + return f"{value} {compare} {self.c_error_value(rtype)}" def tuple_undefined_check_cond( self, @@ -351,19 +415,33 @@ def 
tuple_undefined_check_cond( tuple_expr_in_c: str, c_type_compare_val: Callable[[RType], str], compare: str, + *, + check_exception: bool = True, ) -> str: if len(rtuple.types) == 0: # empty tuple return "{}.empty_struct_error_flag {} {}".format( tuple_expr_in_c, compare, c_type_compare_val(int_rprimitive) ) - item_type = rtuple.types[0] + if rtuple.error_overlap: + i = 0 + item_type = rtuple.types[0] + else: + for i, typ in enumerate(rtuple.types): + if not typ.error_overlap: + item_type = rtuple.types[i] + break + else: + assert False, "not expecting tuple with error overlap" if isinstance(item_type, RTuple): return self.tuple_undefined_check_cond( - item_type, tuple_expr_in_c + ".f0", c_type_compare_val, compare + item_type, tuple_expr_in_c + f".f{i}", c_type_compare_val, compare ) else: - return f"{tuple_expr_in_c}.f0 {compare} {c_type_compare_val(item_type)}" + check = f"{tuple_expr_in_c}.f{i} {compare} {c_type_compare_val(item_type)}" + if rtuple.error_overlap and check_exception: + check += " && PyErr_Occurred()" + return check def tuple_undefined_value(self, rtuple: RTuple) -> str: return "tuple_undefined_" + rtuple.unique_id @@ -925,14 +1003,18 @@ def emit_box( def emit_error_check(self, value: str, rtype: RType, failure: str) -> None: """Emit code for checking a native function return value for uncaught exception.""" - if not isinstance(rtype, RTuple): - self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") - else: + if isinstance(rtype, RTuple): if len(rtype.types) == 0: return # empty tuples can't fail. else: cond = self.tuple_undefined_check_cond(rtype, value, self.c_error_value, "==") self.emit_line(f"if ({cond}) {{") + elif rtype.error_overlap: + # The error value is also valid as a normal value, so we need to also check + # for a raised exception. + self.emit_line(f"if ({value} == {self.c_error_value(rtype)} && PyErr_Occurred()) {{") + else: + self.emit_line(f"if ({value} == {self.c_error_value(rtype)}) {{") self.emit_lines(failure, "}") def emit_gc_visit(self, target: str, rtype: RType) -> None: diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 5434b5c01219..a9b51b8ff1a4 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -13,11 +13,12 @@ generate_dunder_wrapper, generate_get_wrapper, generate_hash_wrapper, + generate_ipow_wrapper, generate_len_wrapper, generate_richcompare_wrapper, generate_set_del_item_wrapper, ) -from mypyc.common import NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall +from mypyc.common import BITMAP_BITS, BITMAP_TYPE, NATIVE_PREFIX, PREFIX, REG_PREFIX, use_fastcall from mypyc.ir.class_ir import ClassIR, VTableEntries from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR from mypyc.ir.rtypes import RTuple, RType, object_rprimitive @@ -61,11 +62,15 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: AS_SEQUENCE_SLOT_DEFS: SlotTable = {"__contains__": ("sq_contains", generate_contains_wrapper)} AS_NUMBER_SLOT_DEFS: SlotTable = { + # Unary operations. "__bool__": ("nb_bool", generate_bool_wrapper), - "__neg__": ("nb_negative", generate_dunder_wrapper), - "__invert__": ("nb_invert", generate_dunder_wrapper), "__int__": ("nb_int", generate_dunder_wrapper), "__float__": ("nb_float", generate_dunder_wrapper), + "__neg__": ("nb_negative", generate_dunder_wrapper), + "__pos__": ("nb_positive", generate_dunder_wrapper), + "__abs__": ("nb_absolute", generate_dunder_wrapper), + "__invert__": ("nb_invert", generate_dunder_wrapper), + # Binary operations. 
"__add__": ("nb_add", generate_bin_op_wrapper), "__radd__": ("nb_add", generate_bin_op_wrapper), "__sub__": ("nb_subtract", generate_bin_op_wrapper), @@ -78,6 +83,8 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: "__rtruediv__": ("nb_true_divide", generate_bin_op_wrapper), "__floordiv__": ("nb_floor_divide", generate_bin_op_wrapper), "__rfloordiv__": ("nb_floor_divide", generate_bin_op_wrapper), + "__divmod__": ("nb_divmod", generate_bin_op_wrapper), + "__rdivmod__": ("nb_divmod", generate_bin_op_wrapper), "__lshift__": ("nb_lshift", generate_bin_op_wrapper), "__rlshift__": ("nb_lshift", generate_bin_op_wrapper), "__rshift__": ("nb_rshift", generate_bin_op_wrapper), @@ -90,6 +97,7 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: "__rxor__": ("nb_xor", generate_bin_op_wrapper), "__matmul__": ("nb_matrix_multiply", generate_bin_op_wrapper), "__rmatmul__": ("nb_matrix_multiply", generate_bin_op_wrapper), + # In-place binary operations. "__iadd__": ("nb_inplace_add", generate_dunder_wrapper), "__isub__": ("nb_inplace_subtract", generate_dunder_wrapper), "__imul__": ("nb_inplace_multiply", generate_dunder_wrapper), @@ -102,6 +110,11 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: "__ior__": ("nb_inplace_or", generate_dunder_wrapper), "__ixor__": ("nb_inplace_xor", generate_dunder_wrapper), "__imatmul__": ("nb_inplace_matrix_multiply", generate_dunder_wrapper), + # Ternary operations. (yes, really) + # These are special cased in generate_bin_op_wrapper(). + "__pow__": ("nb_power", generate_bin_op_wrapper), + "__rpow__": ("nb_power", generate_bin_op_wrapper), + "__ipow__": ("nb_inplace_power", generate_ipow_wrapper), } AS_ASYNC_SLOT_DEFS: SlotTable = { @@ -326,7 +339,7 @@ def emit_line() -> None: flags.append("_Py_TPFLAGS_HAVE_VECTORCALL") if not fields.get("tp_vectorcall"): # This is just a placeholder to please CPython. It will be - # overriden during setup. + # overridden during setup. fields["tp_call"] = "PyVectorcall_Call" fields["tp_flags"] = " | ".join(flags) @@ -367,8 +380,17 @@ def generate_object_struct(cl: ClassIR, emitter: Emitter) -> None: lines += ["typedef struct {", "PyObject_HEAD", "CPyVTableItem *vtable;"] if cl.has_method("__call__") and emitter.use_vectorcall(): lines.append("vectorcallfunc vectorcall;") + bitmap_attrs = [] for base in reversed(cl.base_mro): if not base.is_trait: + if base.bitmap_attrs: + # Do we need another attribute bitmap field? 
+ if emitter.bitmap_field(len(base.bitmap_attrs) - 1) not in bitmap_attrs: + for i in range(0, len(base.bitmap_attrs), BITMAP_BITS): + attr = emitter.bitmap_field(i) + if attr not in bitmap_attrs: + lines.append(f"{BITMAP_TYPE} {attr};") + bitmap_attrs.append(attr) for attr, rtype in base.attributes.items(): if (attr, rtype) not in seen_attrs: lines.append(f"{emitter.ctype_spaced(rtype)}{emitter.attr(attr)};") @@ -546,6 +568,9 @@ def generate_setup_for_class( emitter.emit_line("}") else: emitter.emit_line(f"self->vtable = {vtable_name};") + for i in range(0, len(cl.bitmap_attrs), BITMAP_BITS): + field = emitter.bitmap_field(i) + emitter.emit_line(f"self->{field} = 0;") if cl.has_method("__call__") and emitter.use_vectorcall(): name = cl.method_decl("__call__").cname(emitter.names) @@ -807,7 +832,10 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) ) - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + # Generate getter declaration emitter.emit_line("static PyObject *") emitter.emit_line( @@ -817,7 +845,7 @@ def generate_getseter_declarations(cl: ClassIR, emitter: Emitter) -> None: ) # Generate property setter declaration if a setter exists - if cl.properties[prop][1]: + if setter: emitter.emit_line("static int") emitter.emit_line( "{}({} *self, PyObject *value, void *closure);".format( @@ -837,11 +865,13 @@ def generate_getseters_table(cl: ClassIR, name: str, emitter: Emitter) -> None: ) ) emitter.emit_line(" NULL, NULL},") - for prop in cl.properties: + for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + emitter.emit_line(f'{{"{prop}",') emitter.emit_line(f" (getter){getter_name(cl, prop, emitter.names)},") - setter = cl.properties[prop][1] if setter: emitter.emit_line(f" (setter){setter_name(cl, prop, emitter.names)},") emitter.emit_line("NULL, NULL},") @@ -861,6 +891,9 @@ def generate_getseters(cl: ClassIR, emitter: Emitter) -> None: if i < len(cl.attributes) - 1: emitter.emit_line("") for prop, (getter, setter) in cl.properties.items(): + if getter.decl.implicit: + continue + rtype = getter.sig.ret_type emitter.emit_line("") generate_readonly_getter(cl, prop, rtype, getter, emitter) @@ -887,7 +920,7 @@ def generate_getter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> N always_defined = cl.is_always_defined(attr) and not rtype.is_refcounted if not always_defined: - emitter.emit_undefined_attr_check(rtype, attr_expr, "==", unlikely=True) + emitter.emit_undefined_attr_check(rtype, attr_expr, "==", "self", attr, cl, unlikely=True) emitter.emit_line("PyErr_SetString(PyExc_AttributeError,") emitter.emit_line(f' "attribute {repr(attr)} of {repr(cl.name)} undefined");') emitter.emit_line("return NULL;") @@ -926,7 +959,7 @@ def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> N if rtype.is_refcounted: attr_expr = f"self->{attr_field}" if not always_defined: - emitter.emit_undefined_attr_check(rtype, attr_expr, "!=") + emitter.emit_undefined_attr_check(rtype, attr_expr, "!=", "self", attr, cl) emitter.emit_dec_ref(f"self->{attr_field}", rtype) if not always_defined: emitter.emit_line("}") @@ -943,9 +976,14 @@ def generate_setter(cl: ClassIR, attr: str, rtype: RType, emitter: Emitter) -> N emitter.emit_lines("if (!tmp)", " return -1;") emitter.emit_inc_ref("tmp", rtype) emitter.emit_line(f"self->{attr_field} = tmp;") + if rtype.error_overlap and not always_defined: + emitter.emit_attr_bitmap_set("tmp", "self", rtype, 
cl, attr) + if deletable: emitter.emit_line("} else") emitter.emit_line(f" self->{attr_field} = {emitter.c_undefined_value(rtype)};") + if rtype.error_overlap: + emitter.emit_attr_bitmap_clear("self", rtype, cl, attr) emitter.emit_line("return 0;") emitter.emit_line("}") diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index c0aaff2c5f99..e7fb7db80413 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -330,7 +330,8 @@ def visit_get_attr(self, op: GetAttr) -> None: rtype = op.class_type cl = rtype.class_ir attr_rtype, decl_cl = cl.attr_details(op.attr) - if cl.get_method(op.attr): + prefer_method = cl.is_trait and attr_rtype.error_overlap + if cl.get_method(op.attr, prefer_method=prefer_method): # Properties are essentially methods, so use vtable access for them. version = "_TRAIT" if cl.is_trait else "" self.emit_line( @@ -353,7 +354,9 @@ def visit_get_attr(self, op: GetAttr) -> None: always_defined = cl.is_always_defined(op.attr) merged_branch = None if not always_defined: - self.emitter.emit_undefined_attr_check(attr_rtype, dest, "==", unlikely=True) + self.emitter.emit_undefined_attr_check( + attr_rtype, dest, "==", obj, op.attr, cl, unlikely=True + ) branch = self.next_branch() if branch is not None: if ( @@ -429,14 +432,21 @@ def visit_set_attr(self, op: SetAttr) -> None: # ...and struct access for normal attributes. attr_expr = self.get_attr_expr(obj, op, decl_cl) if not op.is_init and attr_rtype.is_refcounted: - # This is not an initalization (where we know that the attribute was + # This is not an initialization (where we know that the attribute was # previously undefined), so decref the old value. always_defined = cl.is_always_defined(op.attr) if not always_defined: - self.emitter.emit_undefined_attr_check(attr_rtype, attr_expr, "!=") + self.emitter.emit_undefined_attr_check( + attr_rtype, attr_expr, "!=", obj, op.attr, cl + ) self.emitter.emit_dec_ref(attr_expr, attr_rtype) if not always_defined: self.emitter.emit_line("}") + elif attr_rtype.error_overlap and not cl.is_always_defined(op.attr): + # If there is overlap with the error value, update bitmap to mark + # attribute as defined. + self.emitter.emit_attr_bitmap_set(src, obj, attr_rtype, cl, op.attr) + # This steals the reference to src, so we don't need to increment the arg self.emitter.emit_line(f"{attr_expr} = {src};") if op.error_kind == ERR_FALSE: diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 005c0f764e9a..9f65aa77c47f 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -409,7 +409,6 @@ def compile_modules_to_c( compiler_options: The compilation options errors: Where to report any errors encountered groups: The groups that we are compiling. See documentation of Groups type above. - ops: Optionally, where to dump stringified ops for debugging. Returns the IR of the modules and a list containing the generated files for each group. """ @@ -419,7 +418,9 @@ def compile_modules_to_c( # Sometimes when we call back into mypy, there might be errors. # We don't want to crash when that happens. 
- result.manager.errors.set_file("", module=None, scope=None) + result.manager.errors.set_file( + "", module=None, scope=None, options=result.manager.options + ) modules = compile_modules_to_ir(result, mapper, compiler_options, errors) ctext = compile_ir_to_c(groups, modules, result, mapper, compiler_options) @@ -668,6 +669,9 @@ def generate_literal_tables(self) -> None: # Descriptions of tuple literals init_tuple = c_array_initializer(literals.encoded_tuple_values()) self.declare_global("const int []", "CPyLit_Tuple", initializer=init_tuple) + # Descriptions of frozenset literals + init_frozenset = c_array_initializer(literals.encoded_frozenset_values()) + self.declare_global("const int []", "CPyLit_FrozenSet", initializer=init_frozenset) def generate_export_table(self, decl_emitter: Emitter, code_emitter: Emitter) -> None: """Generate the declaration and definition of the group's export struct. @@ -838,7 +842,7 @@ def generate_globals_init(self, emitter: Emitter) -> None: for symbol, fixup in self.simple_inits: emitter.emit_line(f"{symbol} = {fixup};") - values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple" + values = "CPyLit_Str, CPyLit_Bytes, CPyLit_Int, CPyLit_Float, CPyLit_Complex, CPyLit_Tuple, CPyLit_FrozenSet" emitter.emit_lines( f"if (CPyStatics_Initialize(CPyStatics, {values}) < 0) {{", "return -1;", "}" ) diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index a296ce271d07..ed03bb7948cc 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -17,7 +17,15 @@ from mypy.nodes import ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, ArgKind from mypy.operators import op_methods_to_symbols, reverse_op_method_names, reverse_op_methods from mypyc.codegen.emit import AssignHandler, Emitter, ErrorHandler, GotoHandler, ReturnHandler -from mypyc.common import DUNDER_PREFIX, NATIVE_PREFIX, PREFIX, use_vectorcall +from mypyc.common import ( + BITMAP_BITS, + BITMAP_TYPE, + DUNDER_PREFIX, + NATIVE_PREFIX, + PREFIX, + bitmap_name, + use_vectorcall, +) from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FUNC_STATICMETHOD, FuncIR, RuntimeArg from mypyc.ir.rtypes import ( @@ -135,6 +143,8 @@ def generate_wrapper_function( # If fn is a method, then the first argument is a self param real_args = list(fn.args) + if fn.sig.num_bitmap_args: + real_args = real_args[: -fn.sig.num_bitmap_args] if fn.class_name and not fn.decl.kind == FUNC_STATICMETHOD: arg = real_args.pop(0) emitter.emit_line(f"PyObject *obj_{arg.name} = self;") @@ -185,6 +195,9 @@ def generate_wrapper_function( "return NULL;", "}", ) + for i in range(fn.sig.num_bitmap_args): + name = bitmap_name(i) + emitter.emit_line(f"{BITMAP_TYPE} {name} = 0;") traceback_code = generate_traceback_code(fn, emitter, source_path, module_name) generate_wrapper_core( fn, @@ -223,6 +236,8 @@ def generate_legacy_wrapper_function( # If fn is a method, then the first argument is a self param real_args = list(fn.args) + if fn.sig.num_bitmap_args: + real_args = real_args[: -fn.sig.num_bitmap_args] if fn.class_name and not fn.decl.kind == FUNC_STATICMETHOD: arg = real_args.pop(0) emitter.emit_line(f"PyObject *obj_{arg.name} = self;") @@ -254,6 +269,9 @@ def generate_legacy_wrapper_function( "return NULL;", "}", ) + for i in range(fn.sig.num_bitmap_args): + name = bitmap_name(i) + emitter.emit_line(f"{BITMAP_TYPE} {name} = 0;") traceback_code = generate_traceback_code(fn, emitter, source_path, module_name) generate_wrapper_core( fn, @@ -283,6 +301,32 
@@ def generate_dunder_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: return gen.wrapper_name() +def generate_ipow_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: + """Generate a wrapper for native __ipow__. + + Since __ipow__ fills a ternary slot, but almost no one defines __ipow__ to take three + arguments, the wrapper needs to tweaked to force it to accept three arguments. + """ + gen = WrapperGenerator(cl, emitter) + gen.set_target(fn) + assert len(fn.args) in (2, 3), "__ipow__ should only take 2 or 3 arguments" + gen.arg_names = ["self", "exp", "mod"] + gen.emit_header() + gen.emit_arg_processing() + handle_third_pow_argument( + fn, + emitter, + gen, + if_unsupported=[ + 'PyErr_SetString(PyExc_TypeError, "__ipow__ takes 2 positional arguments but 3 were given");', + "return NULL;", + ], + ) + gen.emit_call() + gen.finish() + return gen.wrapper_name() + + def generate_bin_op_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """Generates a wrapper for a native binary dunder method. @@ -293,13 +337,16 @@ def generate_bin_op_wrapper(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: """ gen = WrapperGenerator(cl, emitter) gen.set_target(fn) - gen.arg_names = ["left", "right"] + if fn.name in ("__pow__", "__rpow__"): + gen.arg_names = ["left", "right", "mod"] + else: + gen.arg_names = ["left", "right"] wrapper_name = gen.wrapper_name() gen.emit_header() if fn.name not in reverse_op_methods and fn.name in reverse_op_method_names: # There's only a reverse operator method. - generate_bin_op_reverse_only_wrapper(emitter, gen) + generate_bin_op_reverse_only_wrapper(fn, emitter, gen) else: rmethod = reverse_op_methods[fn.name] fn_rev = cl.get_method(rmethod) @@ -316,6 +363,7 @@ def generate_bin_op_forward_only_wrapper( fn: FuncIR, emitter: Emitter, gen: WrapperGenerator ) -> None: gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) gen.emit_call(not_implemented_handler="goto typefail;") gen.emit_error_handling() emitter.emit_label("typefail") @@ -334,19 +382,16 @@ def generate_bin_op_forward_only_wrapper( # if not isinstance(other, int): # return NotImplemented # ... - rmethod = reverse_op_methods[fn.name] - emitter.emit_line(f"_Py_IDENTIFIER({rmethod});") - emitter.emit_line( - 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( - op_methods_to_symbols[fn.name], rmethod - ) - ) + generate_bin_op_reverse_dunder_call(fn, emitter, reverse_op_methods[fn.name]) gen.finish() -def generate_bin_op_reverse_only_wrapper(emitter: Emitter, gen: WrapperGenerator) -> None: +def generate_bin_op_reverse_only_wrapper( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator +) -> None: gen.arg_names = ["right", "left"] gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail;"]) gen.emit_call() gen.emit_error_handling() emitter.emit_label("typefail") @@ -372,7 +417,14 @@ def generate_bin_op_both_wrappers( ) ) gen.emit_arg_processing(error=GotoHandler("typefail"), raise_exception=False) - gen.emit_call(not_implemented_handler="goto typefail;") + handle_third_pow_argument(fn, emitter, gen, if_unsupported=["goto typefail2;"]) + # Ternary __rpow__ calls aren't a thing so immediately bail + # if ternary __pow__ returns NotImplemented. 
+ if fn.name == "__pow__" and len(fn.args) == 3: + fwd_not_implemented_handler = "goto typefail2;" + else: + fwd_not_implemented_handler = "goto typefail;" + gen.emit_call(not_implemented_handler=fwd_not_implemented_handler) gen.emit_error_handling() emitter.emit_line("}") emitter.emit_label("typefail") @@ -384,15 +436,11 @@ def generate_bin_op_both_wrappers( gen.set_target(fn_rev) gen.arg_names = ["right", "left"] gen.emit_arg_processing(error=GotoHandler("typefail2"), raise_exception=False) + handle_third_pow_argument(fn_rev, emitter, gen, if_unsupported=["goto typefail2;"]) gen.emit_call() gen.emit_error_handling() emitter.emit_line("} else {") - emitter.emit_line(f"_Py_IDENTIFIER({fn_rev.name});") - emitter.emit_line( - 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( - op_methods_to_symbols[fn.name], fn_rev.name - ) - ) + generate_bin_op_reverse_dunder_call(fn, emitter, fn_rev.name) emitter.emit_line("}") emitter.emit_label("typefail2") emitter.emit_line("Py_INCREF(Py_NotImplemented);") @@ -400,6 +448,47 @@ def generate_bin_op_both_wrappers( gen.finish() +def generate_bin_op_reverse_dunder_call(fn: FuncIR, emitter: Emitter, rmethod: str) -> None: + if fn.name in ("__pow__", "__rpow__"): + # Ternary pow() will never call the reverse dunder. + emitter.emit_line("if (obj_mod == Py_None) {") + emitter.emit_line(f"_Py_IDENTIFIER({rmethod});") + emitter.emit_line( + 'return CPy_CallReverseOpMethod(obj_left, obj_right, "{}", &PyId_{});'.format( + op_methods_to_symbols[fn.name], rmethod + ) + ) + if fn.name in ("__pow__", "__rpow__"): + emitter.emit_line("} else {") + emitter.emit_line("Py_INCREF(Py_NotImplemented);") + emitter.emit_line("return Py_NotImplemented;") + emitter.emit_line("}") + + +def handle_third_pow_argument( + fn: FuncIR, emitter: Emitter, gen: WrapperGenerator, *, if_unsupported: list[str] +) -> None: + if fn.name not in ("__pow__", "__rpow__", "__ipow__"): + return + + if (fn.name in ("__pow__", "__ipow__") and len(fn.args) == 2) or fn.name == "__rpow__": + # If the power dunder only supports two arguments and the third + # argument (AKA mod) is set to a non-default value, simply bail. + # + # Importantly, this prevents any ternary __rpow__ calls from + # happening (as per the language specification). + emitter.emit_line("if (obj_mod != Py_None) {") + for line in if_unsupported: + emitter.emit_line(line) + emitter.emit_line("}") + # The slot wrapper will receive three arguments, but the call only + # supports two so make sure that the third argument isn't passed + # along. This is needed as two-argument __(i)pow__ is allowed and + # rather common. + if len(gen.arg_names) == 3: + gen.arg_names.pop() + + RICHCOMPARE_OPS = { "__lt__": "Py_LT", "__gt__": "Py_GT", @@ -669,7 +758,8 @@ def generate_wrapper_core( """ gen = WrapperGenerator(None, emitter) gen.set_target(fn) - gen.arg_names = arg_names or [arg.name for arg in fn.args] + if arg_names: + gen.arg_names = arg_names gen.cleanups = cleanups or [] gen.optional_args = optional_args or [] gen.traceback_code = traceback_code or "" @@ -688,6 +778,7 @@ def generate_arg_check( *, optional: bool = False, raise_exception: bool = True, + bitmap_arg_index: int = 0, ) -> None: """Insert a runtime check for argument and unbox if necessary. @@ -697,17 +788,35 @@ def generate_arg_check( """ error = error or AssignHandler() if typ.is_unboxed: - # Borrow when unboxing to avoid reference count manipulation. 
- emitter.emit_unbox( - f"obj_{name}", - f"arg_{name}", - typ, - declare_dest=True, - raise_exception=raise_exception, - error=error, - borrow=True, - optional=optional, - ) + if typ.error_overlap and optional: + # Update bitmap is value is provided. + init = emitter.c_undefined_value(typ) + emitter.emit_line(f"{emitter.ctype(typ)} arg_{name} = {init};") + emitter.emit_line(f"if (obj_{name} != NULL) {{") + bitmap = bitmap_name(bitmap_arg_index // BITMAP_BITS) + emitter.emit_line(f"{bitmap} |= 1 << {bitmap_arg_index & (BITMAP_BITS - 1)};") + emitter.emit_unbox( + f"obj_{name}", + f"arg_{name}", + typ, + declare_dest=False, + raise_exception=raise_exception, + error=error, + borrow=True, + ) + emitter.emit_line("}") + else: + # Borrow when unboxing to avoid reference count manipulation. + emitter.emit_unbox( + f"obj_{name}", + f"arg_{name}", + typ, + declare_dest=True, + raise_exception=raise_exception, + error=error, + borrow=True, + optional=optional, + ) elif is_object_rprimitive(typ): # Object is trivial since any object is valid if optional: @@ -749,8 +858,12 @@ def set_target(self, fn: FuncIR) -> None: """ self.target_name = fn.name self.target_cname = fn.cname(self.emitter.names) - self.arg_names = [arg.name for arg in fn.args] - self.args = fn.args[:] + self.num_bitmap_args = fn.sig.num_bitmap_args + if self.num_bitmap_args: + self.args = fn.args[: -self.num_bitmap_args] + else: + self.args = fn.args + self.arg_names = [arg.name for arg in self.args] self.ret_type = fn.ret_type def wrapper_name(self) -> str: @@ -779,17 +892,22 @@ def emit_arg_processing( ) -> None: """Emit validation and unboxing of arguments.""" error = error or self.error() + bitmap_arg_index = 0 for arg_name, arg in zip(self.arg_names, self.args): # Suppress the argument check for *args/**kwargs, since we know it must be right. typ = arg.type if arg.kind not in (ARG_STAR, ARG_STAR2) else object_rprimitive + optional = arg in self.optional_args generate_arg_check( arg_name, typ, self.emitter, error, raise_exception=raise_exception, - optional=arg in self.optional_args, + optional=optional, + bitmap_arg_index=bitmap_arg_index, ) + if optional and typ.error_overlap: + bitmap_arg_index += 1 def emit_call(self, not_implemented_handler: str = "") -> None: """Emit call to the wrapper function. @@ -798,6 +916,12 @@ def emit_call(self, not_implemented_handler: str = "") -> None: a NotImplemented return value (if it's possible based on the return type). """ native_args = ", ".join(f"arg_{arg}" for arg in self.arg_names) + if self.num_bitmap_args: + bitmap_args = ", ".join( + [bitmap_name(i) for i in reversed(range(self.num_bitmap_args))] + ) + native_args = f"{native_args}, {bitmap_args}" + ret_type = self.ret_type emitter = self.emitter if ret_type.is_unboxed or self.use_goto(): diff --git a/mypyc/codegen/literals.py b/mypyc/codegen/literals.py index 29957d52101c..05884b754452 100644 --- a/mypyc/codegen/literals.py +++ b/mypyc/codegen/literals.py @@ -1,12 +1,13 @@ from __future__ import annotations -from typing import Any, Tuple, Union, cast +from typing import Any, FrozenSet, List, Tuple, Union, cast from typing_extensions import Final -# Supported Python literal types. All tuple items must have supported +# Supported Python literal types. All tuple / frozenset items must have supported # literal types as well, but we can't represent the type precisely. 
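The bitmap handling that `generate_arg_check` now emits boils down to plain bit arithmetic. The following standalone sketch is only illustrative (only `BITMAP_BITS` corresponds to a real name, from `mypyc.common`); it shows how the "was this argument actually supplied?" flags end up packed into `uint32`-sized words:

```python
BITMAP_BITS = 32  # matches mypyc.common.BITMAP_BITS / BITMAP_TYPE ("uint32_t")

def pack_present_bits(provided: list[bool]) -> list[int]:
    # One bit per tracked optional argument; the bit is set only when the
    # caller supplied a value, mirroring the "bitmap |= 1 << ..." lines above.
    words = [0] * ((len(provided) + BITMAP_BITS - 1) // BITMAP_BITS)
    for index, was_given in enumerate(provided):
        if was_given:
            words[index // BITMAP_BITS] |= 1 << (index & (BITMAP_BITS - 1))
    return words

# First and third tracked arguments passed, second omitted:
assert pack_present_bits([True, False, True]) == [0b101]
```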
-LiteralValue = Union[str, bytes, int, bool, float, complex, Tuple[object, ...], None] - +LiteralValue = Union[ + str, bytes, int, bool, float, complex, Tuple[object, ...], FrozenSet[object], None +] # Some literals are singletons and handled specially (None, False and True) NUM_SINGLETONS: Final = 3 @@ -23,6 +24,7 @@ def __init__(self) -> None: self.float_literals: dict[float, int] = {} self.complex_literals: dict[complex, int] = {} self.tuple_literals: dict[tuple[object, ...], int] = {} + self.frozenset_literals: dict[frozenset[object], int] = {} def record_literal(self, value: LiteralValue) -> None: """Ensure that the literal value is available in generated code.""" @@ -55,6 +57,12 @@ def record_literal(self, value: LiteralValue) -> None: for item in value: self.record_literal(cast(Any, item)) tuple_literals[value] = len(tuple_literals) + elif isinstance(value, frozenset): + frozenset_literals = self.frozenset_literals + if value not in frozenset_literals: + for item in value: + self.record_literal(cast(Any, item)) + frozenset_literals[value] = len(frozenset_literals) else: assert False, "invalid literal: %r" % value @@ -86,6 +94,9 @@ def literal_index(self, value: LiteralValue) -> int: n += len(self.complex_literals) if isinstance(value, tuple): return n + self.tuple_literals[value] + n += len(self.tuple_literals) + if isinstance(value, frozenset): + return n + self.frozenset_literals[value] assert False, "invalid literal: %r" % value def num_literals(self) -> int: @@ -98,6 +109,7 @@ def num_literals(self) -> int: + len(self.float_literals) + len(self.complex_literals) + len(self.tuple_literals) + + len(self.frozenset_literals) ) # The following methods return the C encodings of literal values @@ -119,24 +131,31 @@ def encoded_complex_values(self) -> list[str]: return _encode_complex_values(self.complex_literals) def encoded_tuple_values(self) -> list[str]: - """Encode tuple values into a C array. + return self._encode_collection_values(self.tuple_literals) + + def encoded_frozenset_values(self) -> List[str]: + return self._encode_collection_values(self.frozenset_literals) + + def _encode_collection_values( + self, values: dict[tuple[object, ...], int] | dict[frozenset[object], int] + ) -> list[str]: + """Encode tuple/frozenset values into a C array. The format of the result is like this: - - + + ... - + ... """ - values = self.tuple_literals value_by_index = {index: value for value, index in values.items()} result = [] - num = len(values) - result.append(str(num)) - for i in range(num): + count = len(values) + result.append(str(count)) + for i in range(count): value = value_by_index[i] result.append(str(len(value))) for item in value: diff --git a/mypyc/common.py b/mypyc/common.py index e9b59246898b..c8da5ff63bab 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -1,6 +1,7 @@ from __future__ import annotations import sys +import sysconfig from typing import Any, Dict from typing_extensions import Final @@ -30,27 +31,38 @@ # Maximal number of subclasses for a class to trigger fast path in isinstance() checks. FAST_ISINSTANCE_MAX_SUBCLASSES: Final = 2 -IS_32_BIT_PLATFORM: Final = sys.maxsize < (1 << 31) +# Size of size_t, if configured. +SIZEOF_SIZE_T_SYSCONFIG: Final = sysconfig.get_config_var("SIZEOF_SIZE_T") -PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 +SIZEOF_SIZE_T: Final = ( + int(SIZEOF_SIZE_T_SYSCONFIG) + if SIZEOF_SIZE_T_SYSCONFIG is not None + else (sys.maxsize + 1).bit_length() // 8 +) -# Python 3.5 on macOS uses a hybrid 32/64-bit build that requires some workarounds. 
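A short standalone sketch of the layout `_encode_collection_values` produces may be useful: first the number of collections, then for each collection its length followed by the literal index of every item. The `literal_index` callback below is an illustrative stand-in for `Literals.literal_index`, and the innermost append is inferred from the surrounding code:

```python
def encode_collections(values: dict, literal_index) -> list[str]:
    # values maps each tuple/frozenset to its position, as in the Literals class.
    value_by_index = {index: value for value, index in values.items()}
    result = [str(len(values))]
    for i in range(len(values)):
        value = value_by_index[i]
        result.append(str(len(value)))               # length of this collection
        for item in value:
            result.append(str(literal_index(item)))  # index of each item
    return result

# Two frozensets whose items happen to live at literal indexes 7, 8 and 9:
indexes = {1: 7, 2: 8, 3: 9}
encoded = encode_collections(
    {frozenset({1, 2}): 0, frozenset({3}): 1}, lambda item: indexes[item]
)
# encoded == ["2", "2", "7", "8", "1", "9"] (item order within a set may vary)
```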
-# The same generated C will be compiled in both 32 and 64 bit modes when building mypy -# wheels (for an unknown reason). -# -# Note that we use "in ['darwin']" because of https://github.com/mypyc/mypyc/issues/761. -IS_MIXED_32_64_BIT_BUILD: Final = sys.platform in ["darwin"] and sys.version_info < (3, 6) +IS_32_BIT_PLATFORM: Final = int(SIZEOF_SIZE_T) == 4 + +PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 # Maximum value for a short tagged integer. -MAX_SHORT_INT: Final = sys.maxsize >> 1 +MAX_SHORT_INT: Final = 2 ** (8 * int(SIZEOF_SIZE_T) - 2) - 1 + +# Minimum value for a short tagged integer. +MIN_SHORT_INT: Final = -(MAX_SHORT_INT) - 1 # Maximum value for a short tagged integer represented as a C integer literal. # -# Note: Assume that the compiled code uses the same bit width as mypyc, except for -# Python 3.5 on macOS. -MAX_LITERAL_SHORT_INT: Final = sys.maxsize >> 1 if not IS_MIXED_32_64_BIT_BUILD else 2**30 - 1 +# Note: Assume that the compiled code uses the same bit width as mypyc +MAX_LITERAL_SHORT_INT: Final = MAX_SHORT_INT MIN_LITERAL_SHORT_INT: Final = -MAX_LITERAL_SHORT_INT - 1 +# Description of the C type used to track the definedness of attributes and +# the presence of argument default values that have types with overlapping +# error values. Each tracked attribute/argument has a dedicated bit in the +# relevant bitmap. +BITMAP_TYPE: Final = "uint32_t" +BITMAP_BITS: Final = 32 + # Runtime C library files RUNTIME_C_FILES: Final = [ "init.c", @@ -121,3 +133,9 @@ def short_id_from_name(func_name: str, shortname: str, line: int | None) -> str: else: partial_name = shortname return partial_name + + +def bitmap_name(index: int) -> str: + if index == 0: + return "__bitmap" + return f"__bitmap{index + 1}" diff --git a/mypyc/doc/conf.py b/mypyc/doc/conf.py index 2077c04f093c..da887e0d8267 100644 --- a/mypyc/doc/conf.py +++ b/mypyc/doc/conf.py @@ -36,7 +36,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [] # type: ignore +extensions = [] # type: ignore[var-annotated] # Add any paths that contain templates here, relative to this directory. 
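The new `SIZEOF_SIZE_T`-derived constants in `mypyc/common.py` above are easy to sanity-check; on a typical 64-bit CPython build `SIZEOF_SIZE_T` is 8, which reproduces the old `sys.maxsize >> 1` bound:

```python
import sys
import sysconfig

size_t = int(sysconfig.get_config_var("SIZEOF_SIZE_T") or (sys.maxsize + 1).bit_length() // 8)
max_short = 2 ** (8 * size_t - 2) - 1  # one bit for the int tag, one for the sign
min_short = -max_short - 1

# On a 64-bit build this prints 8, 4611686018427387903 and -4611686018427387904,
# i.e. the same values as sys.maxsize >> 1 and -(sys.maxsize >> 1) - 1.
print(size_t, max_short, min_short)
```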
templates_path = ["_templates"] diff --git a/mypyc/doc/native_operations.rst b/mypyc/doc/native_operations.rst index 896217063fee..2587e982feac 100644 --- a/mypyc/doc/native_operations.rst +++ b/mypyc/doc/native_operations.rst @@ -24,6 +24,7 @@ Functions * ``cast(, obj)`` * ``type(obj)`` * ``len(obj)`` +* ``abs(obj)`` * ``id(obj)`` * ``iter(obj)`` * ``next(iter: Iterator)`` diff --git a/mypyc/doc/using_type_annotations.rst b/mypyc/doc/using_type_annotations.rst index be596fc23210..a01246ab0914 100644 --- a/mypyc/doc/using_type_annotations.rst +++ b/mypyc/doc/using_type_annotations.rst @@ -304,7 +304,7 @@ Example:: def example() -> None: # A small integer uses the value (unboxed) representation x = 5 - # A large integer the the heap (boxed) representation + # A large integer uses the heap (boxed) representation x = 2**500 # Lists always contain boxed integers a = [55] diff --git a/mypyc/errors.py b/mypyc/errors.py index 2fb07c10827a..1dd269fe25f3 100644 --- a/mypyc/errors.py +++ b/mypyc/errors.py @@ -7,7 +7,7 @@ class Errors: def __init__(self) -> None: self.num_errors = 0 self.num_warnings = 0 - self._errors = mypy.errors.Errors() + self._errors = mypy.errors.Errors(hide_error_codes=True) def error(self, msg: str, path: str, line: int) -> None: self._errors.report(line, None, msg, severity="error", file=path) diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index dca19e5a2e3c..a1534780b79b 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -69,10 +69,11 @@ # placed in the class's shadow vtable (if it has one). -VTableMethod = NamedTuple( - "VTableMethod", - [("cls", "ClassIR"), ("name", str), ("method", FuncIR), ("shadow_method", Optional[FuncIR])], -) +class VTableMethod(NamedTuple): + cls: "ClassIR" + name: str + method: FuncIR + shadow_method: Optional[FuncIR] VTableEntries = List[VTableMethod] @@ -130,7 +131,7 @@ def __init__( self.builtin_base: str | None = None # Default empty constructor self.ctor = FuncDecl(name, None, module_name, FuncSignature([], RInstance(self))) - + # Attributes defined in the class (not inherited) self.attributes: dict[str, RType] = {} # Deletable attributes self.deletable: list[str] = [] @@ -184,6 +185,13 @@ def __init__( # If True, __init__ can make 'self' visible to unanalyzed/arbitrary code self.init_self_leak = False + # Definedness of these attributes is backed by a bitmap. Index in the list + # indicates the bit number. Includes inherited attributes. We need the + # bitmap for types such as native ints that can't have a dedicated error + # value that doesn't overlap a valid value. The bitmap is used if the + # value of an attribute is the same as the error value. 
+ self.bitmap_attrs: List[str] = [] + def __repr__(self) -> str: return ( "ClassIR(" @@ -258,10 +266,7 @@ def has_attr(self, name: str) -> bool: return True def is_deletable(self, name: str) -> bool: - for ir in self.mro: - if name in ir.deletable: - return True - return False + return any(name in ir.deletable for ir in self.mro) def is_always_defined(self, name: str) -> bool: if self.is_deletable(name): @@ -274,17 +279,28 @@ def name_prefix(self, names: NameGenerator) -> str: def struct_name(self, names: NameGenerator) -> str: return f"{exported_name(self.fullname)}Object" - def get_method_and_class(self, name: str) -> tuple[FuncIR, ClassIR] | None: + def get_method_and_class( + self, name: str, *, prefer_method: bool = False + ) -> tuple[FuncIR, ClassIR] | None: for ir in self.mro: if name in ir.methods: - return ir.methods[name], ir + func_ir = ir.methods[name] + if not prefer_method and func_ir.decl.implicit: + # This is an implicit accessor, so there is also an attribute definition + # which the caller prefers. This happens if an attribute overrides a + # property. + return None + return func_ir, ir return None - def get_method(self, name: str) -> FuncIR | None: - res = self.get_method_and_class(name) + def get_method(self, name: str, *, prefer_method: bool = False) -> FuncIR | None: + res = self.get_method_and_class(name, prefer_method=prefer_method) return res[0] if res else None + def has_method_decl(self, name: str) -> bool: + return any(name in ir.method_decls for ir in self.mro) + def subclasses(self) -> set[ClassIR] | None: """Return all subclasses of this class, both direct and indirect. diff --git a/mypyc/ir/func_ir.py b/mypyc/ir/func_ir.py index 82ce23402d10..dbb45fc7ec29 100644 --- a/mypyc/ir/func_ir.py +++ b/mypyc/ir/func_ir.py @@ -6,7 +6,7 @@ from typing_extensions import Final from mypy.nodes import ARG_POS, ArgKind, Block, FuncDef -from mypyc.common import JsonDict, get_id_from_name, short_id_from_name +from mypyc.common import BITMAP_BITS, JsonDict, bitmap_name, get_id_from_name, short_id_from_name from mypyc.ir.ops import ( Assign, AssignMulti, @@ -17,7 +17,7 @@ Register, Value, ) -from mypyc.ir.rtypes import RType, deserialize_type +from mypyc.ir.rtypes import RType, bitmap_rprimitive, deserialize_type from mypyc.namegen import NameGenerator @@ -70,12 +70,37 @@ class FuncSignature: def __init__(self, args: Sequence[RuntimeArg], ret_type: RType) -> None: self.args = tuple(args) self.ret_type = ret_type + # Bitmap arguments are used to mark default values for arguments that + # have types with overlapping error values.
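Bitmaps now show up in two places: attribute definedness (the `bitmap_attrs` list added to `ClassIR` above) and argument defaults (the synthetic bitmap arguments appended to `FuncSignature` here). At the source level the attribute case looks roughly like the sketch below, assuming the `i64` native integer type that `mypy_extensions` provides for mypyc:

```python
from mypy_extensions import i64

class Counter:
    # Every i64 bit pattern is a legal value, so there is no spare
    # "undefined" sentinel for this attribute.
    count: i64

    def bump(self) -> i64:
        # Reading count before it was ever assigned must still raise
        # AttributeError; compiled code checks the attribute's bitmap bit
        # instead of comparing against an error value.
        self.count += 1
        return self.count
```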
+ self.num_bitmap_args = num_bitmap_args(self.args) + if self.num_bitmap_args: + extra = [ + RuntimeArg(bitmap_name(i), bitmap_rprimitive, pos_only=True) + for i in range(self.num_bitmap_args) + ] + self.args = self.args + tuple(reversed(extra)) + + def real_args(self) -> tuple[RuntimeArg, ...]: + """Return arguments without any synthetic bitmap arguments.""" + if self.num_bitmap_args: + return self.args[: -self.num_bitmap_args] + return self.args + + def bound_sig(self) -> "FuncSignature": + if self.num_bitmap_args: + return FuncSignature(self.args[1 : -self.num_bitmap_args], self.ret_type) + else: + return FuncSignature(self.args[1:], self.ret_type) def __repr__(self) -> str: return f"FuncSignature(args={self.args!r}, ret={self.ret_type!r})" def serialize(self) -> JsonDict: - return {"args": [t.serialize() for t in self.args], "ret_type": self.ret_type.serialize()} + if self.num_bitmap_args: + args = self.args[: -self.num_bitmap_args] + else: + args = self.args + return {"args": [t.serialize() for t in args], "ret_type": self.ret_type.serialize()} @classmethod def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncSignature: @@ -85,6 +110,14 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncSignature: ) +def num_bitmap_args(args: tuple[RuntimeArg, ...]) -> int: + n = 0 + for arg in args: + if arg.type.error_overlap and arg.kind.is_optional(): + n += 1 + return (n + (BITMAP_BITS - 1)) // BITMAP_BITS + + FUNC_NORMAL: Final = 0 FUNC_STATICMETHOD: Final = 1 FUNC_CLASSMETHOD: Final = 2 @@ -106,6 +139,7 @@ def __init__( kind: int = FUNC_NORMAL, is_prop_setter: bool = False, is_prop_getter: bool = False, + implicit: bool = False, ) -> None: self.name = name self.class_name = class_name @@ -120,9 +154,13 @@ def __init__( if kind == FUNC_STATICMETHOD: self.bound_sig = sig else: - self.bound_sig = FuncSignature(sig.args[1:], sig.ret_type) + self.bound_sig = sig.bound_sig() + + # If True, not present in the mypy AST and must be synthesized during irbuild + # Currently only supported for property getters/setters + self.implicit = implicit - # this is optional because this will be set to the line number when the corresponding + # This is optional because this will be set to the line number when the corresponding # FuncIR is created self._line: int | None = None @@ -165,6 +203,7 @@ def serialize(self) -> JsonDict: "kind": self.kind, "is_prop_setter": self.is_prop_setter, "is_prop_getter": self.is_prop_getter, + "implicit": self.implicit, } # TODO: move this to FuncIR? @@ -186,6 +225,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> FuncDecl: data["kind"], data["is_prop_setter"], data["is_prop_getter"], + data["implicit"], ) diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 56a1e6103acf..51a0bffcf3f1 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -28,7 +28,6 @@ int_rprimitive, is_bit_rprimitive, is_bool_rprimitive, - is_fixed_width_rtype, is_int_rprimitive, is_none_rprimitive, is_pointer_rprimitive, @@ -40,6 +39,7 @@ ) if TYPE_CHECKING: + from mypyc.codegen.literals import LiteralValue from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR @@ -589,7 +589,7 @@ class LoadLiteral(RegisterOp): This is used to load a static PyObject * value corresponding to a literal of one of the supported types. - Tuple literals must contain only valid literal values as items. + Tuple / frozenset literals must contain only valid literal values as items. NOTE: You can use this to load boxed (Python) int objects. 
Use Integer to load unboxed, tagged integers or fixed-width, @@ -604,11 +604,7 @@ class LoadLiteral(RegisterOp): error_kind = ERR_NEVER is_borrowed = True - def __init__( - self, - value: None | str | bytes | bool | int | float | complex | tuple[object, ...], - rtype: RType, - ) -> None: + def __init__(self, value: LiteralValue, rtype: RType) -> None: self.value = value self.type = rtype @@ -632,8 +628,8 @@ def __init__(self, obj: Value, attr: str, line: int, *, borrow: bool = False) -> self.class_type = obj.type attr_type = obj.type.attr_type(attr) self.type = attr_type - if is_fixed_width_rtype(attr_type): - self.error_kind = ERR_NEVER + if attr_type.error_overlap: + self.error_kind = ERR_MAGIC_OVERLAPPING self.is_borrowed = borrow and attr_type.is_refcounted def sources(self) -> list[Value]: @@ -785,7 +781,7 @@ class TupleGet(RegisterOp): error_kind = ERR_NEVER - def __init__(self, src: Value, index: int, line: int) -> None: + def __init__(self, src: Value, index: int, line: int = -1) -> None: super().__init__(line) self.src = src self.index = index @@ -1454,6 +1450,6 @@ def visit_keep_alive(self, op: KeepAlive) -> T: # # (Serialization and deserialization *will* be used for incremental # compilation but so far it is not hooked up to anything.) -DeserMaps = NamedTuple( - "DeserMaps", [("classes", Dict[str, "ClassIR"]), ("functions", Dict[str, "FuncIR"])] -) +class DeserMaps(NamedTuple): + classes: Dict[str, "ClassIR"] + functions: Dict[str, "FuncIR"] diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index 0ef555f86738..cb9e4a2d2541 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -106,7 +106,18 @@ def visit_load_literal(self, op: LoadLiteral) -> str: # it explicit that this is a Python object. if isinstance(op.value, int): prefix = "object " - return self.format("%r = %s%s", op, prefix, repr(op.value)) + + rvalue = repr(op.value) + if isinstance(op.value, frozenset): + # We need to generate a string representation that won't vary + # run-to-run because sets are unordered, otherwise we may get + # spurious irbuild test failures. + # + # Sorting by the item's string representation is a bit of a + # hack, but it's stable and won't cause TypeErrors. + formatted_items = [repr(i) for i in sorted(op.value, key=str)] + rvalue = "frozenset({" + ", ".join(formatted_items) + "})" + return self.format("%r = %s%s", op, prefix, rvalue) def visit_get_attr(self, op: GetAttr) -> str: return self.format("%r = %s%r.%s", op, self.borrow_prefix(op), op.obj, op.attr) @@ -119,6 +130,7 @@ def borrow_prefix(self, op: Op) -> str: def visit_set_attr(self, op: SetAttr) -> str: if op.is_init: assert op.error_kind == ERR_NEVER + if op.error_kind == ERR_NEVER: # Initialization and direct struct access can never fail return self.format("%r.%s = %r", op.obj, op.attr, op.src) else: diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index 9b023da24443..babfe0770f35 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -361,6 +361,9 @@ def __hash__(self) -> int: "c_ptr", is_unboxed=False, is_refcounted=False, ctype="void *" ) +# The type corresponding to mypyc.common.BITMAP_TYPE +bitmap_rprimitive: Final = uint32_rprimitive + # Floats are represent as 'float' PyObject * values. (In the future # we'll likely switch to a more efficient, unboxed representation.) float_rprimitive: Final = RPrimitive("builtins.float", is_unboxed=False, is_refcounted=True) @@ -569,6 +572,7 @@ def __init__(self, types: list[RType]) -> None: # Nominally the max c length is 31 chars, but I'm not honestly worried about this. 
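The deterministic frozenset rendering added to the IR pretty-printer above is a small trick worth seeing in isolation; sorting the items by `str()` keeps irbuild test output identical from run to run:

```python
items = frozenset({3, "a", None})
formatted = "frozenset({" + ", ".join(repr(i) for i in sorted(items, key=str)) + "})"
# Always "frozenset({3, None, 'a'})", regardless of the set's iteration order.
print(formatted)
```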
self.struct_name = f"tuple_{self.unique_id}" self._ctype = f"{self.struct_name}" + self.error_overlap = all(t.error_overlap for t in self.types) and bool(self.types) def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_rtuple(self) @@ -793,6 +797,30 @@ def __init__(self, items: list[RType]) -> None: self.items_set = frozenset(items) self._ctype = "PyObject *" + @staticmethod + def make_simplified_union(items: list[RType]) -> RType: + """Return a normalized union that covers the given items. + + Flatten nested unions and remove duplicate items. + + Overlapping items are *not* simplified. For example, + [object, str] will not be simplified. + """ + items = flatten_nested_unions(items) + assert items + + # Remove duplicate items using set + list to preserve item order + seen = set() + new_items = [] + for item in items: + if item not in seen: + new_items.append(item) + seen.add(item) + if len(new_items) > 1: + return RUnion(new_items) + else: + return new_items[0] + def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_runion(self) @@ -819,6 +847,19 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RUnion: return RUnion(types) +def flatten_nested_unions(types: list[RType]) -> list[RType]: + if not any(isinstance(t, RUnion) for t in types): + return types # Fast path + + flat_items: list[RType] = [] + for t in types: + if isinstance(t, RUnion): + flat_items.extend(flatten_nested_unions(t.items)) + else: + flat_items.append(t) + return flat_items + + def optional_value_type(rtype: RType) -> RType | None: """If rtype is the union of none_rprimitive and another type X, return X. diff --git a/mypyc/irbuild/ast_helpers.py b/mypyc/irbuild/ast_helpers.py index 5b0c58717301..1af1ad611a89 100644 --- a/mypyc/irbuild/ast_helpers.py +++ b/mypyc/irbuild/ast_helpers.py @@ -21,7 +21,7 @@ Var, ) from mypyc.ir.ops import BasicBlock -from mypyc.ir.rtypes import is_tagged +from mypyc.ir.rtypes import is_fixed_width_rtype, is_tagged from mypyc.irbuild.builder import IRBuilder from mypyc.irbuild.constant_fold import constant_fold_expr @@ -70,7 +70,10 @@ def maybe_process_conditional_comparison( return False ltype = self.node_type(e.operands[0]) rtype = self.node_type(e.operands[1]) - if not is_tagged(ltype) or not is_tagged(rtype): + if not ( + (is_tagged(ltype) or is_fixed_width_rtype(ltype)) + and (is_tagged(rtype) or is_fixed_width_rtype(rtype)) + ): return False op = e.operators[0] if op not in ("==", "!=", "<", "<=", ">", ">="): @@ -80,8 +83,17 @@ def maybe_process_conditional_comparison( borrow_left = is_borrow_friendly_expr(self, right_expr) left = self.accept(left_expr, can_borrow=borrow_left) right = self.accept(right_expr, can_borrow=True) - # "left op right" for two tagged integers - self.builder.compare_tagged_condition(left, right, op, true, false, e.line) + if is_fixed_width_rtype(ltype) or is_fixed_width_rtype(rtype): + if not is_fixed_width_rtype(ltype): + left = self.coerce(left, rtype, e.line) + elif not is_fixed_width_rtype(rtype): + right = self.coerce(right, ltype, e.line) + reg = self.binary_op(left, right, op, e.line) + self.builder.flush_keep_alives() + self.add_bool_branch(reg, true, false) + else: + # "left op right" for two tagged integers + self.builder.compare_tagged_condition(left, right, op, true, false, e.line) return True diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index cde12e2a0a75..f37fae608083 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -45,10 +45,20 @@ UnaryExpr, Var, ) -from mypy.types 
import Instance, TupleType, Type, UninhabitedType, get_proper_type +from mypy.types import ( + AnyType, + Instance, + ProperType, + TupleType, + Type, + TypeOfAny, + UninhabitedType, + UnionType, + get_proper_type, +) from mypy.util import split_target from mypy.visitor import ExpressionVisitor, StatementVisitor -from mypyc.common import SELF_NAME, TEMP_ATTR_NAME +from mypyc.common import BITMAP_BITS, SELF_NAME, TEMP_ATTR_NAME from mypyc.crash import catch_errors from mypyc.errors import Errors from mypyc.ir.class_ir import ClassIR, NonExtClassInfo @@ -58,9 +68,11 @@ Assign, BasicBlock, Branch, + ComparisonOp, GetAttr, InitStatic, Integer, + IntOp, LoadStatic, Op, RaiseStandardError, @@ -74,6 +86,8 @@ RInstance, RTuple, RType, + RUnion, + bitmap_rprimitive, c_int_rprimitive, c_pyssize_t_rprimitive, dict_rprimitive, @@ -104,7 +118,7 @@ AssignmentTargetRegister, AssignmentTargetTuple, ) -from mypyc.irbuild.util import is_constant +from mypyc.irbuild.util import bytes_from_str, is_constant from mypyc.options import CompilerOptions from mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op, next_op, py_setattr_op @@ -282,8 +296,7 @@ def load_bytes_from_str_literal(self, value: str) -> Value: are stored in BytesExpr.value, whose type is 'str' not 'bytes'. Thus we perform a special conversion here. """ - bytes_value = bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") - return self.builder.load_bytes(bytes_value) + return self.builder.load_bytes(bytes_from_str(value)) def load_int(self, value: int) -> Value: return self.builder.load_int(value) @@ -446,6 +459,24 @@ def assign_if_null(self, target: Register, get_val: Callable[[], Value], line: i self.goto(body_block) self.activate_block(body_block) + def assign_if_bitmap_unset( + self, target: Register, get_val: Callable[[], Value], index: int, line: int + ) -> None: + error_block, body_block = BasicBlock(), BasicBlock() + o = self.int_op( + bitmap_rprimitive, + self.builder.args[-1 - index // BITMAP_BITS], + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.AND, + line, + ) + b = self.add(ComparisonOp(o, Integer(0, bitmap_rprimitive), ComparisonOp.EQ)) + self.add(Branch(b, error_block, body_block, Branch.BOOL)) + self.activate_block(error_block) + self.add(Assign(target, self.coerce(get_val(), target.type, line))) + self.goto(body_block) + self.activate_block(body_block) + def maybe_add_implicit_return(self) -> None: if is_none_rprimitive(self.ret_types[-1]) or is_object_rprimitive(self.ret_types[-1]): self.add_implicit_return() @@ -834,30 +865,53 @@ def extract_int(self, e: Expression) -> int | None: return None def get_sequence_type(self, expr: Expression) -> RType: - target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) + return self.get_sequence_type_from_type(self.types[expr]) + + def get_sequence_type_from_type(self, target_type: Type) -> RType: + target_type = get_proper_type(target_type) + if isinstance(target_type, UnionType): + return RUnion.make_simplified_union( + [self.get_sequence_type_from_type(item) for item in target_type.items] + ) + assert isinstance(target_type, Instance), target_type if target_type.type.fullname == "builtins.str": return str_rprimitive else: return self.type_to_rtype(target_type.args[0]) - def get_dict_base_type(self, expr: Expression) -> Instance: + def get_dict_base_type(self, expr: Expression) -> list[Instance]: """Find dict type of a dict-like expression. 
This is useful for dict subclasses like SymbolTable. """ target_type = get_proper_type(self.types[expr]) - assert isinstance(target_type, Instance) - dict_base = next(base for base in target_type.type.mro if base.fullname == "builtins.dict") - return map_instance_to_supertype(target_type, dict_base) + if isinstance(target_type, UnionType): + types = [get_proper_type(item) for item in target_type.items] + else: + types = [target_type] + + dict_types = [] + for t in types: + assert isinstance(t, Instance), t + dict_base = next(base for base in t.type.mro if base.fullname == "builtins.dict") + dict_types.append(map_instance_to_supertype(t, dict_base)) + return dict_types def get_dict_key_type(self, expr: Expression) -> RType: - dict_base_type = self.get_dict_base_type(expr) - return self.type_to_rtype(dict_base_type.args[0]) + dict_base_types = self.get_dict_base_type(expr) + if len(dict_base_types) == 1: + return self.type_to_rtype(dict_base_types[0].args[0]) + else: + rtypes = [self.type_to_rtype(t.args[0]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) def get_dict_value_type(self, expr: Expression) -> RType: - dict_base_type = self.get_dict_base_type(expr) - return self.type_to_rtype(dict_base_type.args[1]) + dict_base_types = self.get_dict_base_type(expr) + if len(dict_base_types) == 1: + return self.type_to_rtype(dict_base_types[0].args[1]) + else: + rtypes = [self.type_to_rtype(t.args[1]) for t in dict_base_types] + return RUnion.make_simplified_union(rtypes) def get_dict_item_type(self, expr: Expression) -> RType: key_type = self.get_dict_key_type(expr) @@ -867,7 +921,11 @@ def get_dict_item_type(self, expr: Expression) -> RType: def _analyze_iterable_item_type(self, expr: Expression) -> Type: """Return the item type given by 'expr' in an iterable context.""" # This logic is copied from mypy's TypeChecker.analyze_iterable_item_type. - iterable = get_proper_type(self.types[expr]) + if expr not in self.types: + # Mypy thinks this is unreachable. + iterable: ProperType = AnyType(TypeOfAny.from_error) + else: + iterable = get_proper_type(self.types[expr]) echk = self.graph[self.module_name].type_checker().expr_checker iterator = echk.check_method_call_by_name("__iter__", iterable, [], [], expr)[0] @@ -959,7 +1017,7 @@ def call_refexpr_with_args( ) -> Value: # Handle data-driven special-cased primitive call ops. - if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values): + if callee.fullname and expr.arg_kinds == [ARG_POS] * len(arg_values): call_c_ops_candidates = function_ops.get(callee.fullname, []) target = self.builder.matching_call_c( call_c_ops_candidates, arg_values, expr.line, self.node_type(expr) @@ -984,7 +1042,7 @@ def call_refexpr_with_args( callee_node = callee_node.func if ( callee_node is not None - and callee.fullname is not None + and callee.fullname and callee_node in self.mapper.func_to_decl and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds) ): @@ -1198,7 +1256,7 @@ def load_global(self, expr: NameExpr) -> Value: and isinstance(expr.node, TypeInfo) and not self.is_synthetic_type(expr.node) ): - assert expr.fullname is not None + assert expr.fullname return self.load_native_type_object(expr.fullname) return self.load_global_str(expr.name, expr.line) @@ -1246,6 +1304,7 @@ def gen_arg_defaults(builder: IRBuilder) -> None: value to the argument. 
""" fitem = builder.fn_info.fitem + nb = 0 for arg in fitem.arguments: if arg.initializer: target = builder.lookup(arg.variable) @@ -1271,7 +1330,14 @@ def get_default() -> Value: ) assert isinstance(target, AssignmentTargetRegister) - builder.assign_if_null(target.register, get_default, arg.initializer.line) + reg = target.register + if not reg.type.error_overlap: + builder.assign_if_null(target.register, get_default, arg.initializer.line) + else: + builder.assign_if_bitmap_unset( + target.register, get_default, nb, arg.initializer.line + ) + nb += 1 def remangle_redefinition_name(name: str) -> str: diff --git a/mypyc/irbuild/callable_class.py b/mypyc/irbuild/callable_class.py index 1170e3fc7363..d3ee54a208cd 100644 --- a/mypyc/irbuild/callable_class.py +++ b/mypyc/irbuild/callable_class.py @@ -92,7 +92,10 @@ def add_call_to_callable_class( given callable class, used to represent that function. """ # Since we create a method, we also add a 'self' parameter. - sig = FuncSignature((RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args, sig.ret_type) + nargs = len(sig.args) - sig.num_bitmap_args + sig = FuncSignature( + (RuntimeArg(SELF_NAME, object_rprimitive),) + sig.args[:nargs], sig.ret_type + ) call_fn_decl = FuncDecl("__call__", fn_info.callable_class.ir.name, builder.module_name, sig) call_fn_ir = FuncIR( call_fn_decl, args, blocks, fn_info.fitem.line, traceback_name=fn_info.fitem.name diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py index 2c412253ec71..59b1c05a0ddb 100644 --- a/mypyc/irbuild/classdef.py +++ b/mypyc/irbuild/classdef.py @@ -24,7 +24,8 @@ TypeInfo, is_class_var, ) -from mypy.types import ENUM_REMOVED_PROPS, Instance, get_proper_type +from mypy.types import ENUM_REMOVED_PROPS, Instance, UnboundType, get_proper_type +from mypyc.common import PROPSET_PREFIX from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import FuncDecl, FuncSignature from mypyc.ir.ops import ( @@ -53,7 +54,13 @@ object_rprimitive, ) from mypyc.irbuild.builder import IRBuilder -from mypyc.irbuild.function import handle_ext_method, handle_non_ext_method, load_type +from mypyc.irbuild.function import ( + gen_property_getter_ir, + gen_property_setter_ir, + handle_ext_method, + handle_non_ext_method, + load_type, +) from mypyc.irbuild.util import dataclass_type, get_func_def, is_constant, is_dataclass_decorator from mypyc.primitives.dict_ops import dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import py_hasattr_op, py_setattr_op @@ -84,7 +91,7 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: # classes aren't necessarily populated yet at # prepare_class_def time. 
if any(ir.base_mro[i].base != ir.base_mro[i + 1] for i in range(len(ir.base_mro) - 1)): - builder.error("Non-trait MRO must be linear", cdef.line) + builder.error("Multiple inheritance is not supported (except for traits)", cdef.line) if ir.allow_interpreted_subclasses: for parent in ir.mro: @@ -151,6 +158,26 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None: else: builder.error("Unsupported statement in class body", stmt.line) + # Generate implicit property setters/getters + for name, decl in ir.method_decls.items(): + if decl.implicit and decl.is_prop_getter: + getter_ir = gen_property_getter_ir(builder, decl, cdef, ir.is_trait) + builder.functions.append(getter_ir) + ir.methods[getter_ir.decl.name] = getter_ir + + setter_ir = None + setter_name = PROPSET_PREFIX + name + if setter_name in ir.method_decls: + setter_ir = gen_property_setter_ir( + builder, ir.method_decls[setter_name], cdef, ir.is_trait + ) + builder.functions.append(setter_ir) + ir.methods[setter_name] = setter_ir + + ir.properties[name] = (getter_ir, setter_ir) + # TODO: Generate glue method if needed? + # TODO: Do we need interpreted glue methods? Maybe not? + cls_builder.finalize(ir) @@ -451,6 +478,7 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: "typing.Collection", "typing.Reversible", "typing.Container", + "typing.Sized", ): # HAX: Synthesized base classes added by mypy don't exist at runtime, so skip them. # This could break if they were added explicitly, though... @@ -482,7 +510,11 @@ def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value: name = "_NamedTuple" base = builder.get_module_attr("typing", name, cdef.line) else: - base = builder.load_global_str(cls.name, cdef.line) + cls_module = cls.fullname.rsplit(".", 1)[0] + if cls_module == builder.current_module: + base = builder.load_global_str(cls.name, cdef.line) + else: + base = builder.load_module_attr_by_fullname(cls.fullname, cdef.line) bases.append(base) if cls.fullname in MAGIC_TYPED_DICT_CLASSES: # The remaining base classes are synthesized by mypy and should be ignored. @@ -551,6 +583,7 @@ def add_non_ext_class_attr_ann( get_type_info: Callable[[AssignmentStmt], TypeInfo | None] | None = None, ) -> None: """Add a class attribute to __annotations__ of a non-extension class.""" + # FIXME: try to better preserve the special forms and type parameters of generics. typ: Value | None = None if get_type_info is not None: type_info = get_type_info(stmt) @@ -560,7 +593,17 @@ def add_non_ext_class_attr_ann( if typ is None: # FIXME: if get_type_info is not provided, don't fall back to stmt.type? ann_type = get_proper_type(stmt.type) - if isinstance(ann_type, Instance): + if ( + isinstance(stmt.unanalyzed_type, UnboundType) + and stmt.unanalyzed_type.original_str_expr is not None + ): + # Annotation is a forward reference, so don't attempt to load the actual + # type and load the string instead. + # + # TODO: is it possible to determine whether a non-string annotation is + # actually a forward reference due to the __annotations__ future? 
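The loop above synthesizes trivial getter and setter `FuncIR`s for attributes that can also be accessed as properties. Based on the comments here and in `ClassIR.get_method_and_class`, one situation this covers is an attribute overriding a property inherited from a trait; a hedged source-level sketch (assuming the `trait` decorator from `mypy_extensions`):

```python
from mypy_extensions import trait

@trait
class HasName:
    @property
    def name(self) -> str: ...

class Person(HasName):
    # A plain attribute standing in for the trait's property; the implicit
    # accessors generated above keep property-style access through the
    # trait working.
    name: str
```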
+ typ = builder.load_str(stmt.unanalyzed_type.original_str_expr) + elif isinstance(ann_type, Instance): typ = load_type(builder, ann_type.type, stmt.line) else: typ = builder.add(LoadAddress(type_object_op.type, type_object_op.src, stmt.line)) diff --git a/mypyc/irbuild/constant_fold.py b/mypyc/irbuild/constant_fold.py index 08cf75d9e5ca..4e9eb53b9222 100644 --- a/mypyc/irbuild/constant_fold.py +++ b/mypyc/irbuild/constant_fold.py @@ -1,6 +1,11 @@ """Constant folding of IR values. For example, 3 + 5 can be constant folded into 8. + +This is mostly like mypy.constant_fold, but we can bind some additional +NameExpr and MemberExpr references here, since we have more knowledge +about which definitions can be trusted -- we constant fold only references +to other compiled modules in the same compilation unit. """ from __future__ import annotations @@ -8,6 +13,11 @@ from typing import Union from typing_extensions import Final +from mypy.constant_fold import ( + constant_fold_binary_int_op, + constant_fold_binary_str_op, + constant_fold_unary_int_op, +) from mypy.nodes import Expression, IntExpr, MemberExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var from mypyc.irbuild.builder import IRBuilder @@ -51,50 +61,3 @@ def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | if isinstance(value, int): return constant_fold_unary_int_op(expr.op, value) return None - - -def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: - if op == "+": - return left + right - if op == "-": - return left - right - elif op == "*": - return left * right - elif op == "//": - if right != 0: - return left // right - elif op == "%": - if right != 0: - return left % right - elif op == "&": - return left & right - elif op == "|": - return left | right - elif op == "^": - return left ^ right - elif op == "<<": - if right >= 0: - return left << right - elif op == ">>": - if right >= 0: - return left >> right - elif op == "**": - if right >= 0: - return left**right - return None - - -def constant_fold_unary_int_op(op: str, value: int) -> int | None: - if op == "-": - return -value - elif op == "~": - return ~value - elif op == "+": - return value - return None - - -def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: - if op == "+": - return left + right - return None diff --git a/mypyc/irbuild/env_class.py b/mypyc/irbuild/env_class.py index beb3215389ba..ded8072deb63 100644 --- a/mypyc/irbuild/env_class.py +++ b/mypyc/irbuild/env_class.py @@ -17,11 +17,11 @@ def g() -> int: from __future__ import annotations -from mypy.nodes import FuncDef, SymbolNode -from mypyc.common import ENV_ATTR_NAME, SELF_NAME +from mypy.nodes import Argument, FuncDef, SymbolNode, Var +from mypyc.common import BITMAP_BITS, ENV_ATTR_NAME, SELF_NAME, bitmap_name from mypyc.ir.class_ir import ClassIR from mypyc.ir.ops import Call, GetAttr, SetAttr, Value -from mypyc.ir.rtypes import RInstance, object_rprimitive +from mypyc.ir.rtypes import RInstance, bitmap_rprimitive, object_rprimitive from mypyc.irbuild.builder import IRBuilder, SymbolTarget from mypyc.irbuild.context import FuncInfo, GeneratorClass, ImplicitClass from mypyc.irbuild.targets import AssignmentTargetAttr @@ -159,6 +159,15 @@ def load_outer_envs(builder: IRBuilder, base: ImplicitClass) -> None: index -= 1 +def num_bitmap_args(builder: IRBuilder, args: list[Argument]) -> int: + n = 0 + for arg in args: + t = builder.type_to_rtype(arg.variable.type) + if t.error_overlap and arg.kind.is_optional(): + n += 1 + return (n + 
(BITMAP_BITS - 1)) // BITMAP_BITS + + def add_args_to_env( builder: IRBuilder, local: bool = True, @@ -166,12 +175,16 @@ def add_args_to_env( reassign: bool = True, ) -> None: fn_info = builder.fn_info + args = fn_info.fitem.arguments + nb = num_bitmap_args(builder, args) if local: - for arg in fn_info.fitem.arguments: + for arg in args: rtype = builder.type_to_rtype(arg.variable.type) builder.add_local_reg(arg.variable, rtype, is_arg=True) + for i in reversed(range(nb)): + builder.add_local_reg(Var(bitmap_name(i)), bitmap_rprimitive, is_arg=True) else: - for arg in fn_info.fitem.arguments: + for arg in args: if is_free_variable(builder, arg.variable) or fn_info.is_generator: rtype = builder.type_to_rtype(arg.variable.type) assert base is not None, "base cannot be None for adding nonlocal args" diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index f6d488ccac42..3f5b795a1436 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Callable, cast +from typing import Callable, Sequence, cast from mypy.nodes import ( ARG_POS, @@ -52,7 +52,10 @@ from mypyc.ir.ops import ( Assign, BasicBlock, + ComparisonOp, + Integer, LoadAddress, + LoadLiteral, RaiseStandardError, Register, TupleGet, @@ -61,11 +64,14 @@ ) from mypyc.ir.rtypes import ( RTuple, + bool_rprimitive, int_rprimitive, + is_fixed_width_rtype, is_int_rprimitive, is_list_rprimitive, is_none_rprimitive, object_rprimitive, + set_rprimitive, ) from mypyc.irbuild.ast_helpers import is_borrow_friendly_expr, process_conditional from mypyc.irbuild.builder import IRBuilder, int_borrow_friendly_op @@ -83,6 +89,7 @@ tokenizer_printf_style, ) from mypyc.irbuild.specialize import apply_function_specialization, apply_method_specialization +from mypyc.irbuild.util import bytes_from_str from mypyc.primitives.bytes_ops import bytes_slice_op from mypyc.primitives.dict_ops import dict_get_item_op, dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op @@ -90,7 +97,7 @@ from mypyc.primitives.list_ops import list_append_op, list_extend_op, list_slice_op from mypyc.primitives.misc_ops import ellipsis_op, get_module_dict_op, new_slice_op, type_op from mypyc.primitives.registry import CFunctionDescription, builtin_names -from mypyc.primitives.set_ops import set_add_op, set_update_op +from mypyc.primitives.set_ops import set_add_op, set_in_op, set_update_op from mypyc.primitives.str_ops import str_slice_op from mypyc.primitives.tuple_ops import list_tuple_op, tuple_slice_op @@ -263,13 +270,20 @@ def transform_super_expr(builder: IRBuilder, o: SuperExpr) -> Value: def transform_call_expr(builder: IRBuilder, expr: CallExpr) -> Value: + callee = expr.callee if isinstance(expr.analyzed, CastExpr): return translate_cast_expr(builder, expr.analyzed) elif isinstance(expr.analyzed, AssertTypeExpr): # Compile to a no-op. return builder.accept(expr.analyzed.expr) + elif ( + isinstance(callee, (NameExpr, MemberExpr)) + and isinstance(callee.node, TypeInfo) + and callee.node.is_newtype + ): + # A call to a NewType type is a no-op at runtime. 
+ return builder.accept(expr.args[0]) - callee = expr.callee if isinstance(callee, IndexExpr) and isinstance(callee.analyzed, TypeApplication): callee = callee.analyzed.expr # Unwrap type application @@ -472,21 +486,26 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: if folded: return folded + borrow_left = False + borrow_right = False + + ltype = builder.node_type(expr.left) + rtype = builder.node_type(expr.right) + # Special case some int ops to allow borrowing operands. - if is_int_rprimitive(builder.node_type(expr.left)) and is_int_rprimitive( - builder.node_type(expr.right) - ): + if is_int_rprimitive(ltype) and is_int_rprimitive(rtype): if expr.op == "//": expr = try_optimize_int_floor_divide(expr) if expr.op in int_borrow_friendly_op: borrow_left = is_borrow_friendly_expr(builder, expr.right) - left = builder.accept(expr.left, can_borrow=borrow_left) - right = builder.accept(expr.right, can_borrow=True) - return builder.binary_op(left, right, expr.op, expr.line) + borrow_right = True + elif is_fixed_width_rtype(ltype) and is_fixed_width_rtype(rtype): + borrow_left = is_borrow_friendly_expr(builder, expr.right) + borrow_right = True - return builder.binary_op( - builder.accept(expr.left), builder.accept(expr.right), expr.op, expr.line - ) + left = builder.accept(expr.left, can_borrow=borrow_left) + right = builder.accept(expr.right, can_borrow=borrow_right) + return builder.binary_op(left, right, expr.op, expr.line) def try_optimize_int_floor_divide(expr: OpExpr) -> OpExpr: @@ -598,6 +617,54 @@ def transform_conditional_expr(builder: IRBuilder, expr: ConditionalExpr) -> Val return target +def set_literal_values(builder: IRBuilder, items: Sequence[Expression]) -> list[object] | None: + values: list[object] = [] + for item in items: + const_value = constant_fold_expr(builder, item) + if const_value is not None: + values.append(const_value) + continue + + if isinstance(item, RefExpr): + if item.fullname == "builtins.None": + values.append(None) + elif item.fullname == "builtins.True": + values.append(True) + elif item.fullname == "builtins.False": + values.append(False) + elif isinstance(item, (BytesExpr, FloatExpr, ComplexExpr)): + # constant_fold_expr() doesn't handle these (yet?) + v = bytes_from_str(item.value) if isinstance(item, BytesExpr) else item.value + values.append(v) + elif isinstance(item, TupleExpr): + tuple_values = set_literal_values(builder, item.items) + if tuple_values is not None: + values.append(tuple(tuple_values)) + + if len(values) != len(items): + # Bail if not all items can be converted into values. + return None + return values + + +def precompute_set_literal(builder: IRBuilder, s: SetExpr) -> Value | None: + """Try to pre-compute a frozenset literal during module initialization. + + Return None if it's not possible. + + Supported items: + - Anything supported by irbuild.constant_fold.constant_fold_expr() + - None, True, and False + - Float, byte, and complex literals + - Tuple literals with only items listed above + """ + values = set_literal_values(builder, s.items) + if values is not None: + return builder.add(LoadLiteral(frozenset(values), set_rprimitive)) + + return None + + def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: # x in (...)/[...] # x not in (...)/[...] 
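`precompute_set_literal` above, together with the `x in {...}` special case added in the next hunk, lets membership tests against a set display run against a frozenset built once at module initialization. For example:

```python
def is_vowel(ch: str) -> bool:
    # The {"a", "e", "i", "o", "u"} display becomes a frozenset literal
    # loaded from the static literals table instead of being rebuilt on
    # every call.
    return ch in {"a", "e", "i", "o", "u"}
```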
@@ -651,6 +718,23 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: else: return builder.true() + # x in {...} + # x not in {...} + if ( + first_op in ("in", "not in") + and len(e.operators) == 1 + and isinstance(e.operands[1], SetExpr) + ): + set_literal = precompute_set_literal(builder, e.operands[1]) + if set_literal is not None: + lhs = e.operands[0] + result = builder.builder.call_c( + set_in_op, [builder.accept(lhs), set_literal], e.line, bool_rprimitive + ) + if first_op == "not in": + return builder.unary_op(result, "not", e.line) + return result + if len(e.operators) == 1: # Special some common simple cases if first_op in ("is", "is not"): @@ -708,6 +792,25 @@ def transform_basic_comparison( and op in int_comparison_op_mapping.keys() ): return builder.compare_tagged(left, right, op, line) + if is_fixed_width_rtype(left.type) and op in int_comparison_op_mapping.keys(): + if right.type == left.type: + op_id = ComparisonOp.signed_ops[op] + return builder.builder.comparison_op(left, right, op_id, line) + elif isinstance(right, Integer): + op_id = ComparisonOp.signed_ops[op] + return builder.builder.comparison_op( + left, Integer(right.value >> 1, left.type), op_id, line + ) + elif ( + is_fixed_width_rtype(right.type) + and op in int_comparison_op_mapping.keys() + and isinstance(left, Integer) + ): + op_id = ComparisonOp.signed_ops[op] + return builder.builder.comparison_op( + Integer(left.value >> 1, right.type), right, op_id, line + ) + negate = False if op == "is not": op, negate = "is", True diff --git a/mypyc/irbuild/for_helpers.py b/mypyc/irbuild/for_helpers.py index 59b15423fe37..61dbbe960eb2 100644 --- a/mypyc/irbuild/for_helpers.py +++ b/mypyc/irbuild/for_helpers.py @@ -17,6 +17,7 @@ Lvalue, MemberExpr, RefExpr, + SetExpr, TupleExpr, TypeAlias, ) @@ -38,6 +39,7 @@ bool_rprimitive, int_rprimitive, is_dict_rprimitive, + is_fixed_width_rtype, is_list_rprimitive, is_sequence_rprimitive, is_short_int_rprimitive, @@ -468,12 +470,22 @@ def make_for_loop_generator( for_dict_gen.init(expr_reg, target_type) return for_dict_gen + iterable_expr_reg: Value | None = None + if isinstance(expr, SetExpr): + # Special case "for x in <set literal>". + from mypyc.irbuild.expression import precompute_set_literal + + set_literal = precompute_set_literal(builder, expr) + if set_literal is not None: + iterable_expr_reg = set_literal + # Default to a generic for loop.
- expr_reg = builder.accept(expr) + if iterable_expr_reg is None: + iterable_expr_reg = builder.accept(expr) for_obj = ForIterable(builder, index, body_block, loop_exit, line, nested) item_type = builder._analyze_iterable_item_type(expr) item_rtype = builder.type_to_rtype(item_type) - for_obj.init(expr_reg, item_rtype) + for_obj.init(iterable_expr_reg, item_rtype) return for_obj @@ -887,7 +899,9 @@ def init(self, start_reg: Value, end_reg: Value, step: int) -> None: self.step = step self.end_target = builder.maybe_spill(end_reg) if is_short_int_rprimitive(start_reg.type) and is_short_int_rprimitive(end_reg.type): - index_type = short_int_rprimitive + index_type: RType = short_int_rprimitive + elif is_fixed_width_rtype(end_reg.type): + index_type = end_reg.type else: index_type = int_rprimitive index_reg = Register(index_type) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index eb35a983866d..02155d70e928 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -13,7 +13,7 @@ from __future__ import annotations from collections import defaultdict -from typing import DefaultDict, NamedTuple, Sequence +from typing import NamedTuple, Sequence from mypy.nodes import ( ArgKind, @@ -28,7 +28,7 @@ Var, ) from mypy.types import CallableType, get_proper_type -from mypyc.common import LAMBDA_NAME, SELF_NAME +from mypyc.common import LAMBDA_NAME, PROPSET_PREFIX, SELF_NAME from mypyc.ir.class_ir import ClassIR, NonExtClassInfo from mypyc.ir.func_ir import ( FUNC_CLASSMETHOD, @@ -89,7 +89,7 @@ from mypyc.primitives.generic_ops import py_setattr_op from mypyc.primitives.misc_ops import register_function from mypyc.primitives.registry import builtin_names -from mypyc.sametype import is_same_method_signature +from mypyc.sametype import is_same_method_signature, is_same_type # Top-level transform functions @@ -123,7 +123,7 @@ def transform_decorator(builder: IRBuilder, dec: Decorator) -> None: # if this is a registered singledispatch implementation with no other decorators), we should # treat this function as a regular function, not a decorated function elif dec.func in builder.fdefs_to_decorators: - # Obtain the the function name in order to construct the name of the helper function. + # Obtain the function name in order to construct the name of the helper function. name = dec.func.fullname.split(".")[-1] # Load the callable object representing the non-decorated function, and decorate it. @@ -397,7 +397,7 @@ def handle_ext_method(builder: IRBuilder, cdef: ClassDef, fdef: FuncDef) -> None builder.functions.append(func_ir) if is_decorated(builder, fdef): - # Obtain the the function name in order to construct the name of the helper function. + # Obtain the function name in order to construct the name of the helper function. _, _, name = fdef.fullname.rpartition(".") # Read the PyTypeObject representing the class, get the callable object # representing the non-decorated method @@ -548,7 +548,7 @@ def is_decorated(builder: IRBuilder, fdef: FuncDef) -> bool: def gen_glue( builder: IRBuilder, - sig: FuncSignature, + base_sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, @@ -566,9 +566,9 @@ def gen_glue( "shadow" glue methods that work with interpreted subclasses. 
""" if fdef.is_property: - return gen_glue_property(builder, sig, target, cls, base, fdef.line, do_py_ops) + return gen_glue_property(builder, base_sig, target, cls, base, fdef.line, do_py_ops) else: - return gen_glue_method(builder, sig, target, cls, base, fdef.line, do_py_ops) + return gen_glue_method(builder, base_sig, target, cls, base, fdef.line, do_py_ops) class ArgInfo(NamedTuple): @@ -594,7 +594,7 @@ def get_args(builder: IRBuilder, rt_args: Sequence[RuntimeArg], line: int) -> Ar def gen_glue_method( builder: IRBuilder, - sig: FuncSignature, + base_sig: FuncSignature, target: FuncIR, cls: ClassIR, base: ClassIR, @@ -626,16 +626,25 @@ def f(builder: IRBuilder, x: object) -> int: ... If do_pycall is True, then make the call using the C API instead of a native call. """ + check_native_override(builder, base_sig, target.decl.sig, line) + builder.enter() - builder.ret_types[-1] = sig.ret_type + builder.ret_types[-1] = base_sig.ret_type - rt_args = list(sig.args) + rt_args = list(base_sig.args) if target.decl.kind == FUNC_NORMAL: - rt_args[0] = RuntimeArg(sig.args[0].name, RInstance(cls)) + rt_args[0] = RuntimeArg(base_sig.args[0].name, RInstance(cls)) arg_info = get_args(builder, rt_args, line) args, arg_kinds, arg_names = arg_info.args, arg_info.arg_kinds, arg_info.arg_names + bitmap_args = None + if base_sig.num_bitmap_args: + args = args[: -base_sig.num_bitmap_args] + arg_kinds = arg_kinds[: -base_sig.num_bitmap_args] + arg_names = arg_names[: -base_sig.num_bitmap_args] + bitmap_args = builder.builder.args[-base_sig.num_bitmap_args :] + # We can do a passthrough *args/**kwargs with a native call, but if the # args need to get distributed out to arguments, we just let python handle it if any(kind.is_star() for kind in arg_kinds) and any( @@ -655,11 +664,15 @@ def f(builder: IRBuilder, x: object) -> int: ... first, target.name, args[st:], line, arg_kinds[st:], arg_names[st:] ) else: - retval = builder.builder.call(target.decl, args, arg_kinds, arg_names, line) - retval = builder.coerce(retval, sig.ret_type, line) + retval = builder.builder.call( + target.decl, args, arg_kinds, arg_names, line, bitmap_args=bitmap_args + ) + retval = builder.coerce(retval, base_sig.ret_type, line) builder.add(Return(retval)) arg_regs, _, blocks, ret_type, _ = builder.leave() + if base_sig.num_bitmap_args: + rt_args = rt_args[: -base_sig.num_bitmap_args] return FuncIR( FuncDecl( target.name + "__" + base.name + "_glue", @@ -673,6 +686,35 @@ def f(builder: IRBuilder, x: object) -> int: ... ) +def check_native_override( + builder: IRBuilder, base_sig: FuncSignature, sub_sig: FuncSignature, line: int +) -> None: + """Report an error if an override changes signature in unsupported ways. + + Glue methods can work around many signature changes but not all of them. + """ + for base_arg, sub_arg in zip(base_sig.real_args(), sub_sig.real_args()): + if base_arg.type.error_overlap: + if not base_arg.optional and sub_arg.optional and base_sig.num_bitmap_args: + # This would change the meanings of bits in the argument defaults + # bitmap, which we don't support. We'd need to do tricky bit + # manipulations to support this generally. + builder.error( + "An argument with type " + + f'"{base_arg.type}" cannot be given a default value in a method override', + line, + ) + if base_arg.type.error_overlap or sub_arg.type.error_overlap: + if not is_same_type(base_arg.type, sub_arg.type): + # This would change from signaling a default via an error value to + # signaling a default via bitmap, which we don't support. 
+ builder.error( + "Incompatible argument type " + + f'"{sub_arg.type}" (base class has type "{base_arg.type}")', + line, + ) + + def gen_glue_property( builder: IRBuilder, sig: FuncSignature, @@ -747,7 +789,7 @@ def load_type(builder: IRBuilder, typ: TypeInfo, line: int) -> Value: def load_func(builder: IRBuilder, func_name: str, fullname: str | None, line: int) -> Value: - if fullname is not None and not fullname.startswith(builder.current_module): + if fullname and not fullname.startswith(builder.current_module): # we're calling a function in a different module # We can't use load_module_attr_by_fullname here because we need to load the function using @@ -933,7 +975,7 @@ def maybe_insert_into_registry_dict(builder: IRBuilder, fitem: FuncDef) -> None: line = fitem.line is_singledispatch_main_func = fitem in builder.singledispatch_impls # dict of singledispatch_func to list of register_types (fitem is the function to register) - to_register: DefaultDict[FuncDef, list[TypeInfo]] = defaultdict(list) + to_register: defaultdict[FuncDef, list[TypeInfo]] = defaultdict(list) for main_func, impls in builder.singledispatch_impls.items(): for dispatch_type, impl in impls: if fitem == impl: @@ -984,3 +1026,42 @@ def get_native_impl_ids(builder: IRBuilder, singledispatch_func: FuncDef) -> dic """ impls = builder.singledispatch_impls[singledispatch_func] return {impl: i for i, (typ, impl) in enumerate(impls) if not is_decorated(builder, impl)} + + +def gen_property_getter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: + """Generate an implicit trivial property getter for an attribute. + + These are used if an attribute can also be accessed as a property. + """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + if not is_trait: + value = builder.builder.get_attr(self_reg, name, func_decl.sig.ret_type, -1) + builder.add(Return(value)) + else: + builder.add(Unreachable()) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) + + +def gen_property_setter_ir( + builder: IRBuilder, func_decl: FuncDecl, cdef: ClassDef, is_trait: bool +) -> FuncIR: + """Generate an implicit trivial property setter for an attribute. + + These are used if an attribute can also be accessed as a property. 
+ """ + name = func_decl.name + builder.enter(name) + self_reg = builder.add_argument("self", func_decl.sig.args[0].type) + value_reg = builder.add_argument("value", func_decl.sig.args[1].type) + assert name.startswith(PROPSET_PREFIX) + attr_name = name[len(PROPSET_PREFIX) :] + if not is_trait: + builder.add(SetAttr(self_reg, attr_name, value_reg, -1)) + builder.add(Return(builder.none())) + args, _, blocks, ret_type, fn_info = builder.leave() + return FuncIR(func_decl, args, blocks) diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index b0648d6e4c5d..2391ccc4d0ed 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -18,9 +18,12 @@ from mypy.operators import op_methods from mypy.types import AnyType, TypeOfAny from mypyc.common import ( + BITMAP_BITS, FAST_ISINSTANCE_MAX_SUBCLASSES, MAX_LITERAL_SHORT_INT, + MAX_SHORT_INT, MIN_LITERAL_SHORT_INT, + MIN_SHORT_INT, PLATFORM_SIZE, use_method_vectorcall, use_vectorcall, @@ -42,6 +45,7 @@ CallC, Cast, ComparisonOp, + Extend, GetAttr, GetElementPtr, Goto, @@ -63,6 +67,7 @@ Unbox, Unreachable, Value, + int_op_to_id, ) from mypyc.ir.rtypes import ( PyListObject, @@ -71,13 +76,16 @@ PyVarObject, RArray, RInstance, + RPrimitive, RTuple, RType, RUnion, bit_rprimitive, + bitmap_rprimitive, bool_rprimitive, bytes_rprimitive, c_int_rprimitive, + c_pointer_rprimitive, c_pyssize_t_rprimitive, c_size_t_rprimitive, dict_rprimitive, @@ -87,6 +95,10 @@ is_bool_rprimitive, is_bytes_rprimitive, is_dict_rprimitive, + is_fixed_width_rtype, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, is_list_rprimitive, is_none_rprimitive, is_set_rprimitive, @@ -124,7 +136,18 @@ py_vectorcall_method_op, py_vectorcall_op, ) -from mypyc.primitives.int_ops import int_comparison_op_mapping +from mypyc.primitives.int_ops import ( + int32_divide_op, + int32_mod_op, + int32_overflow, + int64_divide_op, + int64_mod_op, + int64_to_int_op, + int_comparison_op_mapping, + int_to_int32_op, + int_to_int64_op, + ssize_t_to_int_op, +) from mypyc.primitives.list_ops import list_build_op, list_extend_op, new_list_op from mypyc.primitives.misc_ops import bool_op, fast_isinstance_op, none_object_op from mypyc.primitives.registry import ( @@ -153,6 +176,32 @@ # From CPython PY_VECTORCALL_ARGUMENTS_OFFSET: Final = 1 << (PLATFORM_SIZE * 8 - 1) +FIXED_WIDTH_INT_BINARY_OPS: Final = { + "+", + "-", + "*", + "//", + "%", + "&", + "|", + "^", + "<<", + ">>", + "+=", + "-=", + "*=", + "//=", + "%=", + "&=", + "|=", + "^=", + "<<=", + ">>=", +} + +# Binary operations on bools that are specialized and don't just promote operands to int +BOOL_BINARY_OPS: Final = {"&", "&=", "|", "|=", "^", "^=", "==", "!=", "<", "<=", ">", ">="} + class LowLevelIRBuilder: def __init__(self, current_module: str, mapper: Mapper, options: CompilerOptions) -> None: @@ -250,17 +299,54 @@ def coerce( Returns the register with the converted value (may be same as src). """ - if src.type.is_unboxed and not target_type.is_unboxed: + src_type = src.type + if src_type.is_unboxed and not target_type.is_unboxed: + # Unboxed -> boxed return self.box(src) - if (src.type.is_unboxed and target_type.is_unboxed) and not is_runtime_subtype( - src.type, target_type + if (src_type.is_unboxed and target_type.is_unboxed) and not is_runtime_subtype( + src_type, target_type ): - # To go from one unboxed type to another, we go through a boxed - # in-between value, for simplicity. 
- tmp = self.box(src) - return self.unbox_or_cast(tmp, target_type, line) - if (not src.type.is_unboxed and target_type.is_unboxed) or not is_subtype( - src.type, target_type + if ( + isinstance(src, Integer) + and is_short_int_rprimitive(src_type) + and is_fixed_width_rtype(target_type) + ): + # TODO: range check + return Integer(src.value >> 1, target_type) + elif is_int_rprimitive(src_type) and is_fixed_width_rtype(target_type): + return self.coerce_int_to_fixed_width(src, target_type, line) + elif is_fixed_width_rtype(src_type) and is_int_rprimitive(target_type): + return self.coerce_fixed_width_to_int(src, line) + elif is_short_int_rprimitive(src_type) and is_fixed_width_rtype(target_type): + return self.coerce_short_int_to_fixed_width(src, target_type, line) + elif ( + isinstance(src_type, RPrimitive) + and isinstance(target_type, RPrimitive) + and src_type.is_native_int + and target_type.is_native_int + and src_type.size == target_type.size + and src_type.is_signed == target_type.is_signed + ): + # Equivalent types + return src + elif (is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type)) and is_tagged( + target_type + ): + shifted = self.int_op( + bool_rprimitive, src, Integer(1, bool_rprimitive), IntOp.LEFT_SHIFT + ) + return self.add(Extend(shifted, target_type, signed=False)) + elif ( + is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) + ) and is_fixed_width_rtype(target_type): + return self.add(Extend(src, target_type, signed=False)) + else: + # To go from one unboxed type to another, we go through a boxed + # in-between value, for simplicity. + tmp = self.box(src) + return self.unbox_or_cast(tmp, target_type, line) + if (not src_type.is_unboxed and target_type.is_unboxed) or not is_subtype( + src_type, target_type ): return self.unbox_or_cast(src, target_type, line, can_borrow=can_borrow) elif force: @@ -269,6 +355,133 @@ def coerce( return tmp return src + def coerce_int_to_fixed_width(self, src: Value, target_type: RType, line: int) -> Value: + assert is_fixed_width_rtype(target_type), target_type + assert isinstance(target_type, RPrimitive) + + res = Register(target_type) + + fast, slow, end = BasicBlock(), BasicBlock(), BasicBlock() + + check = self.check_tagged_short_int(src, line) + self.add(Branch(check, fast, slow, Branch.BOOL)) + + self.activate_block(fast) + + size = target_type.size + if size < int_rprimitive.size: + # Add a range check when the target type is smaller than the source tyoe + fast2, fast3 = BasicBlock(), BasicBlock() + upper_bound = 1 << (size * 8 - 1) + check2 = self.add(ComparisonOp(src, Integer(upper_bound, src.type), ComparisonOp.SLT)) + self.add(Branch(check2, fast2, slow, Branch.BOOL)) + self.activate_block(fast2) + check3 = self.add(ComparisonOp(src, Integer(-upper_bound, src.type), ComparisonOp.SGE)) + self.add(Branch(check3, fast3, slow, Branch.BOOL)) + self.activate_block(fast3) + tmp = self.int_op( + c_pyssize_t_rprimitive, + src, + Integer(1, c_pyssize_t_rprimitive), + IntOp.RIGHT_SHIFT, + line, + ) + tmp = self.add(Truncate(tmp, target_type)) + else: + if size > int_rprimitive.size: + tmp = self.add(Extend(src, target_type, signed=True)) + else: + tmp = src + tmp = self.int_op(target_type, tmp, Integer(1, target_type), IntOp.RIGHT_SHIFT, line) + + self.add(Assign(res, tmp)) + self.goto(end) + + self.activate_block(slow) + if is_int64_rprimitive(target_type) or ( + is_int32_rprimitive(target_type) and size == int_rprimitive.size + ): + # Slow path calls a library function that handles more complex logic + ptr = 
self.int_op( + pointer_rprimitive, src, Integer(1, pointer_rprimitive), IntOp.XOR, line + ) + ptr2 = Register(c_pointer_rprimitive) + self.add(Assign(ptr2, ptr)) + if is_int64_rprimitive(target_type): + conv_op = int_to_int64_op + else: + conv_op = int_to_int32_op + tmp = self.call_c(conv_op, [ptr2], line) + self.add(Assign(res, tmp)) + self.add(KeepAlive([src])) + self.goto(end) + elif is_int32_rprimitive(target_type): + # Slow path just always generates an OverflowError + self.call_c(int32_overflow, [], line) + self.add(Unreachable()) + else: + assert False, target_type + + self.activate_block(end) + return res + + def coerce_short_int_to_fixed_width(self, src: Value, target_type: RType, line: int) -> Value: + if is_int64_rprimitive(target_type): + return self.int_op(target_type, src, Integer(1, target_type), IntOp.RIGHT_SHIFT, line) + # TODO: i32 + assert False, (src.type, target_type) + + def coerce_fixed_width_to_int(self, src: Value, line: int) -> Value: + if is_int32_rprimitive(src.type) and PLATFORM_SIZE == 8: + # Simple case -- just sign extend and shift. + extended = self.add(Extend(src, c_pyssize_t_rprimitive, signed=True)) + return self.int_op( + int_rprimitive, + extended, + Integer(1, c_pyssize_t_rprimitive), + IntOp.LEFT_SHIFT, + line, + ) + + assert is_fixed_width_rtype(src.type) + assert isinstance(src.type, RPrimitive) + src_type = src.type + + res = Register(int_rprimitive) + + fast, fast2, slow, end = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + + c1 = self.add(ComparisonOp(src, Integer(MAX_SHORT_INT, src_type), ComparisonOp.SLE)) + self.add(Branch(c1, fast, slow, Branch.BOOL)) + + self.activate_block(fast) + c2 = self.add(ComparisonOp(src, Integer(MIN_SHORT_INT, src_type), ComparisonOp.SGE)) + self.add(Branch(c2, fast2, slow, Branch.BOOL)) + + self.activate_block(slow) + if is_int64_rprimitive(src_type): + conv_op = int64_to_int_op + elif is_int32_rprimitive(src_type): + assert PLATFORM_SIZE == 4 + conv_op = ssize_t_to_int_op + else: + assert False, src_type + x = self.call_c(conv_op, [src], line) + self.add(Assign(res, x)) + self.goto(end) + + self.activate_block(fast2) + if int_rprimitive.size < src_type.size: + tmp = self.add(Truncate(src, c_pyssize_t_rprimitive)) + else: + tmp = src + s = self.int_op(int_rprimitive, tmp, Integer(1, tmp.type), IntOp.LEFT_SHIFT, line) + self.add(Assign(res, s)) + self.goto(end) + + self.activate_block(end) + return res + def coerce_nullable(self, src: Value, target_type: RType, line: int) -> Value: """Generate a coercion from a potentially null value.""" if src.type.is_unboxed == target_type.is_unboxed and ( @@ -305,9 +518,12 @@ def get_attr( and obj.type.class_ir.is_ext_class and obj.type.class_ir.has_attr(attr) ): - if borrow: + op = GetAttr(obj, attr, line, borrow=borrow) + # For non-refcounted attribute types, the borrow might be + # disabled even if requested, so don't check 'borrow'. + if op.is_borrowed: self.keep_alives.append(obj) - return self.add(GetAttr(obj, attr, line, borrow=borrow)) + return self.add(op) elif isinstance(obj.type, RUnion): return self.union_get_attr(obj, obj.type, attr, result_type, line) else: @@ -733,10 +949,19 @@ def call( arg_kinds: list[ArgKind], arg_names: Sequence[str | None], line: int, + *, + bitmap_args: list[Register] | None = None, ) -> Value: - """Call a native function.""" + """Call a native function. + + If bitmap_args is given, they override the values of (some) of the bitmap + arguments used to track the presence of values for certain arguments. 
By + default, the values of the bitmap arguments are inferred from args. + """ # Normalize args to positionals. - args = self.native_args_to_positional(args, arg_kinds, arg_names, decl.sig, line) + args = self.native_args_to_positional( + args, arg_kinds, arg_names, decl.sig, line, bitmap_args=bitmap_args + ) return self.add(Call(decl, args, line)) def native_args_to_positional( @@ -746,6 +971,8 @@ def native_args_to_positional( arg_names: Sequence[str | None], sig: FuncSignature, line: int, + *, + bitmap_args: list[Register] | None = None, ) -> list[Value]: """Prepare arguments for a native call. @@ -756,8 +983,14 @@ def native_args_to_positional( and coerce arguments to the appropriate type. """ - sig_arg_kinds = [arg.kind for arg in sig.args] - sig_arg_names = [arg.name for arg in sig.args] + sig_args = sig.args + n = sig.num_bitmap_args + if n: + sig_args = sig_args[:-n] + + sig_arg_kinds = [arg.kind for arg in sig_args] + sig_arg_names = [arg.name for arg in sig_args] + concrete_kinds = [concrete_arg_kind(arg_kind) for arg_kind in arg_kinds] formal_to_actual = map_actuals_to_formals( concrete_kinds, @@ -770,7 +1003,7 @@ def native_args_to_positional( # First scan for */** and construct those has_star = has_star2 = False star_arg_entries = [] - for lst, arg in zip(formal_to_actual, sig.args): + for lst, arg in zip(formal_to_actual, sig_args): if arg.kind.is_star(): star_arg_entries.extend([(args[i], arg_kinds[i], arg_names[i]) for i in lst]) has_star = has_star or arg.kind == ARG_STAR @@ -783,8 +1016,8 @@ def native_args_to_positional( # Flatten out the arguments, loading error values for default # arguments, constructing tuples/dicts for star args, and # coercing everything to the expected type. - output_args = [] - for lst, arg in zip(formal_to_actual, sig.args): + output_args: list[Value] = [] + for lst, arg in zip(formal_to_actual, sig_args): if arg.kind == ARG_STAR: assert star_arg output_arg = star_arg @@ -792,7 +1025,10 @@ def native_args_to_positional( assert star2_arg output_arg = star2_arg elif not lst: - output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) + if is_fixed_width_rtype(arg.type): + output_arg = Integer(0, arg.type) + else: + output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) else: base_arg = args[lst[0]] @@ -803,6 +1039,22 @@ def native_args_to_positional( output_args.append(output_arg) + for i in reversed(range(n)): + if bitmap_args and i < len(bitmap_args): + # Use override provided by caller + output_args.append(bitmap_args[i]) + continue + # Infer values of bitmap args + bitmap = 0 + c = 0 + for lst, arg in zip(formal_to_actual, sig_args): + if arg.kind.is_optional() and arg.type.error_overlap: + if i * BITMAP_BITS <= c < (i + 1) * BITMAP_BITS: + if lst: + bitmap |= 1 << (c & (BITMAP_BITS - 1)) + c += 1 + output_args.append(Integer(bitmap, bitmap_rprimitive)) + return output_args def gen_method_call( @@ -965,7 +1217,13 @@ def load_native_type_object(self, fullname: str) -> Value: return self.add(LoadStatic(object_rprimitive, name, module, NAMESPACE_TYPE)) # Other primitive operations + def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + """Perform a binary operation. + + Generate specialized operations based on operand types, with a fallback + to generic operations. 
+ """ ltype = lreg.type rtype = rreg.type @@ -990,14 +1248,86 @@ def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: return self.compare_bytes(lreg, rreg, op, line) if is_tagged(ltype) and is_tagged(rtype) and op in int_comparison_op_mapping: return self.compare_tagged(lreg, rreg, op, line) - if ( - is_bool_rprimitive(ltype) - and is_bool_rprimitive(rtype) - and op in ("&", "&=", "|", "|=", "^", "^=") - ): - return self.bool_bitwise_op(lreg, rreg, op[0], line) + if is_bool_rprimitive(ltype) and is_bool_rprimitive(rtype) and op in BOOL_BINARY_OPS: + if op in ComparisonOp.signed_ops: + return self.bool_comparison_op(lreg, rreg, op, line) + else: + return self.bool_bitwise_op(lreg, rreg, op[0], line) if isinstance(rtype, RInstance) and op in ("in", "not in"): return self.translate_instance_contains(rreg, lreg, op, line) + if is_fixed_width_rtype(ltype): + if op in FIXED_WIDTH_INT_BINARY_OPS: + if op.endswith("="): + op = op[:-1] + if op != "//": + op_id = int_op_to_id[op] + else: + op_id = IntOp.DIV + if is_bool_rprimitive(rtype) or is_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) + rtype = ltype + if is_fixed_width_rtype(rtype) or is_tagged(rtype): + return self.fixed_width_int_op(ltype, lreg, rreg, op_id, line) + if isinstance(rreg, Integer): + # TODO: Check what kind of Integer + return self.fixed_width_int_op( + ltype, lreg, Integer(rreg.value >> 1, ltype), op_id, line + ) + elif op in ComparisonOp.signed_ops: + if is_int_rprimitive(rtype): + rreg = self.coerce_int_to_fixed_width(rreg, ltype, line) + elif is_bool_rprimitive(rtype) or is_bit_rprimitive(rtype): + rreg = self.coerce(rreg, ltype, line) + op_id = ComparisonOp.signed_ops[op] + if is_fixed_width_rtype(rreg.type): + return self.comparison_op(lreg, rreg, op_id, line) + if isinstance(rreg, Integer): + return self.comparison_op(lreg, Integer(rreg.value >> 1, ltype), op_id, line) + elif is_fixed_width_rtype(rtype): + if op in FIXED_WIDTH_INT_BINARY_OPS: + if op.endswith("="): + op = op[:-1] + if op != "//": + op_id = int_op_to_id[op] + else: + op_id = IntOp.DIV + if isinstance(lreg, Integer): + # TODO: Check what kind of Integer + return self.fixed_width_int_op( + rtype, Integer(lreg.value >> 1, rtype), rreg, op_id, line + ) + if is_tagged(ltype): + return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) + if is_bool_rprimitive(ltype) or is_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) + return self.fixed_width_int_op(rtype, lreg, rreg, op_id, line) + elif op in ComparisonOp.signed_ops: + if is_int_rprimitive(ltype): + lreg = self.coerce_int_to_fixed_width(lreg, rtype, line) + elif is_bool_rprimitive(ltype) or is_bit_rprimitive(ltype): + lreg = self.coerce(lreg, rtype, line) + op_id = ComparisonOp.signed_ops[op] + if isinstance(lreg, Integer): + return self.comparison_op(Integer(lreg.value >> 1, rtype), rreg, op_id, line) + if is_fixed_width_rtype(lreg.type): + return self.comparison_op(lreg, rreg, op_id, line) + + # Mixed int comparisons + if op in ("==", "!="): + op_id = ComparisonOp.signed_ops[op] + if is_tagged(ltype) and is_subtype(rtype, ltype): + rreg = self.coerce(rreg, int_rprimitive, line) + return self.comparison_op(lreg, rreg, op_id, line) + if is_tagged(rtype) and is_subtype(ltype, rtype): + lreg = self.coerce(lreg, int_rprimitive, line) + return self.comparison_op(lreg, rreg, op_id, line) + elif op in op in int_comparison_op_mapping: + if is_tagged(ltype) and is_subtype(rtype, ltype): + rreg = self.coerce(rreg, short_int_rprimitive, line) + return 
self.compare_tagged(lreg, rreg, op, line) + if is_tagged(rtype) and is_subtype(ltype, rtype): + lreg = self.coerce(lreg, short_int_rprimitive, line) + return self.compare_tagged(lreg, rreg, op, line) call_c_ops_candidates = binary_ops.get(op, []) target = self.matching_call_c(call_c_ops_candidates, [lreg, rreg], line) @@ -1056,7 +1386,7 @@ def compare_tagged_condition( ) -> None: """Compare two tagged integers using given operator (conditional context). - Assume lhs and and rhs are tagged integers. + Assume lhs and rhs are tagged integers. Args: lhs: Left operand @@ -1202,17 +1532,43 @@ def bool_bitwise_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value assert False, op return self.add(IntOp(bool_rprimitive, lreg, rreg, code, line)) + def bool_comparison_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: + op_id = ComparisonOp.signed_ops[op] + return self.comparison_op(lreg, rreg, op_id, line) + def unary_not(self, value: Value, line: int) -> Value: mask = Integer(1, value.type, line) return self.int_op(value.type, value, mask, IntOp.XOR, line) def unary_op(self, value: Value, expr_op: str, line: int) -> Value: typ = value.type - if (is_bool_rprimitive(typ) or is_bit_rprimitive(typ)) and expr_op == "not": - return self.unary_not(value, line) + if is_bool_rprimitive(typ) or is_bit_rprimitive(typ): + if expr_op == "not": + return self.unary_not(value, line) + if expr_op == "+": + return value + if is_fixed_width_rtype(typ): + if expr_op == "-": + # Translate to '0 - x' + return self.int_op(typ, Integer(0, typ), value, IntOp.SUB, line) + elif expr_op == "~": + # Translate to 'x ^ -1' + return self.int_op(typ, value, Integer(-1, typ), IntOp.XOR, line) + elif expr_op == "+": + return value + if isinstance(value, Integer): + # TODO: Overflow? Unsigned? + num = value.value + if is_short_int_rprimitive(typ): + num >>= 1 + return Integer(-num, typ, value.line) + if is_tagged(typ) and expr_op == "+": + return value if isinstance(typ, RInstance): if expr_op == "-": method = "__neg__" + elif expr_op == "+": + method = "__pos__" elif expr_op == "~": method = "__invert__" else: @@ -1329,32 +1685,38 @@ def shortcircuit_helper( self.activate_block(next_block) return target - def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: - if is_runtime_subtype(value.type, int_rprimitive): + def bool_value(self, value: Value) -> Value: + """Return bool(value). + + The result type can be bit_rprimitive or bool_rprimitive. 
+ """ + if is_bool_rprimitive(value.type) or is_bit_rprimitive(value.type): + result = value + elif is_runtime_subtype(value.type, int_rprimitive): zero = Integer(0, short_int_rprimitive) - self.compare_tagged_condition(value, zero, "!=", true, false, value.line) - return + result = self.comparison_op(value, zero, ComparisonOp.NEQ, value.line) + elif is_fixed_width_rtype(value.type): + zero = Integer(0, value.type) + result = self.add(ComparisonOp(value, zero, ComparisonOp.NEQ)) elif is_same_type(value.type, str_rprimitive): - value = self.call_c(str_check_if_true, [value], value.line) + result = self.call_c(str_check_if_true, [value], value.line) elif is_same_type(value.type, list_rprimitive) or is_same_type( value.type, dict_rprimitive ): length = self.builtin_len(value, value.line) zero = Integer(0) - value = self.binary_op(length, zero, "!=", value.line) + result = self.binary_op(length, zero, "!=", value.line) elif ( isinstance(value.type, RInstance) and value.type.class_ir.is_ext_class and value.type.class_ir.has_method("__bool__") ): # Directly call the __bool__ method on classes that have it. - value = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) + result = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) else: value_type = optional_value_type(value.type) if value_type is not None: - is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) - branch = Branch(is_none, true, false, Branch.BOOL) - self.add(branch) + not_none = self.translate_is_op(value, self.none_object(), "is not", value.line) always_truthy = False if isinstance(value_type, RInstance): # check whether X.__bool__ is always just the default (object.__bool__) @@ -1363,18 +1725,55 @@ def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> ) and value_type.class_ir.is_method_final("__bool__"): always_truthy = True - if not always_truthy: - # Optional[X] where X may be falsey and requires a check - branch.true = BasicBlock() - self.activate_block(branch.true) + if always_truthy: + result = not_none + else: + # "X | None" where X may be falsey and requires a check + result = Register(bit_rprimitive) + true, false, end = BasicBlock(), BasicBlock(), BasicBlock() + branch = Branch(not_none, true, false, Branch.BOOL) + self.add(branch) + self.activate_block(true) # unbox_or_cast instead of coerce because we want the # type to change even if it is a subtype. 
remaining = self.unbox_or_cast(value, value_type, value.line) - self.add_bool_branch(remaining, true, false) - return - elif not is_bool_rprimitive(value.type) and not is_bit_rprimitive(value.type): - value = self.call_c(bool_op, [value], value.line) - self.add(Branch(value, true, false, Branch.BOOL)) + as_bool = self.bool_value(remaining) + self.add(Assign(result, as_bool)) + self.goto(end) + self.activate_block(false) + self.add(Assign(result, Integer(0, bit_rprimitive))) + self.goto(end) + self.activate_block(end) + else: + result = self.call_c(bool_op, [value], value.line) + return result + + def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None: + opt_value_type = optional_value_type(value.type) + if opt_value_type is None: + bool_value = self.bool_value(value) + self.add(Branch(bool_value, true, false, Branch.BOOL)) + else: + # Special-case optional types + is_none = self.translate_is_op(value, self.none_object(), "is not", value.line) + branch = Branch(is_none, true, false, Branch.BOOL) + self.add(branch) + always_truthy = False + if isinstance(opt_value_type, RInstance): + # check whether X.__bool__ is always just the default (object.__bool__) + if not opt_value_type.class_ir.has_method( + "__bool__" + ) and opt_value_type.class_ir.is_method_final("__bool__"): + always_truthy = True + + if not always_truthy: + # Optional[X] where X may be falsey and requires a check + branch.true = BasicBlock() + self.activate_block(branch.true) + # unbox_or_cast instead of coerce because we want the + # type to change even if it is a subtype. + remaining = self.unbox_or_cast(value, opt_value_type, value.line) + self.add_bool_branch(remaining, true, false) def call_c( self, @@ -1479,9 +1878,99 @@ def matching_call_c( return target return None - def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: + def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> Value: + """Generate a native integer binary op. + + Use native/C semantics, which sometimes differ from Python + semantics. + + Args: + type: Either int64_rprimitive or int32_rprimitive + op: IntOp.* constant (e.g. IntOp.ADD) + """ return self.add(IntOp(type, lhs, rhs, op, line)) + def fixed_width_int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: + """Generate a binary op using Python fixed-width integer semantics. + + These may differ in overflow/rounding behavior from native/C ops. + + Args: + type: Either int64_rprimitive or int32_rprimitive + op: IntOp.* constant (e.g. 
IntOp.ADD) + """ + lhs = self.coerce(lhs, type, line) + rhs = self.coerce(rhs, type, line) + if op == IntOp.DIV: + # Inline simple division by a constant, so that C + # compilers can optimize more + if isinstance(rhs, Integer) and rhs.value not in (-1, 0): + return self.inline_fixed_width_divide(type, lhs, rhs, line) + if is_int64_rprimitive(type): + prim = int64_divide_op + elif is_int32_rprimitive(type): + prim = int32_divide_op + else: + assert False, type + return self.call_c(prim, [lhs, rhs], line) + if op == IntOp.MOD: + # Inline simple % by a constant, so that C + # compilers can optimize more + if isinstance(rhs, Integer) and rhs.value not in (-1, 0): + return self.inline_fixed_width_mod(type, lhs, rhs, line) + if is_int64_rprimitive(type): + prim = int64_mod_op + elif is_int32_rprimitive(type): + prim = int32_mod_op + else: + assert False, type + return self.call_c(prim, [lhs, rhs], line) + return self.int_op(type, lhs, rhs, op, line) + + def inline_fixed_width_divide(self, type: RType, lhs: Value, rhs: Value, line: int) -> Value: + # Perform floor division (native division truncates) + res = Register(type) + div = self.int_op(type, lhs, rhs, IntOp.DIV, line) + self.add(Assign(res, div)) + diff_signs = self.is_different_native_int_signs(type, lhs, rhs, line) + tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(diff_signs, done, tricky, Branch.BOOL)) + self.activate_block(tricky) + mul = self.int_op(type, res, rhs, IntOp.MUL, line) + mul_eq = self.add(ComparisonOp(mul, lhs, ComparisonOp.EQ, line)) + adjust = BasicBlock() + self.add(Branch(mul_eq, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.int_op(type, res, Integer(1, type), IntOp.SUB, line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def inline_fixed_width_mod(self, type: RType, lhs: Value, rhs: Value, line: int) -> Value: + # Perform floor modulus + res = Register(type) + mod = self.int_op(type, lhs, rhs, IntOp.MOD, line) + self.add(Assign(res, mod)) + diff_signs = self.is_different_native_int_signs(type, lhs, rhs, line) + tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() + self.add(Branch(diff_signs, done, tricky, Branch.BOOL)) + self.activate_block(tricky) + is_zero = self.add(ComparisonOp(res, Integer(0, type), ComparisonOp.EQ, line)) + adjust = BasicBlock() + self.add(Branch(is_zero, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.int_op(type, res, rhs, IntOp.ADD, line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def is_different_native_int_signs(self, type: RType, a: Value, b: Value, line: int) -> Value: + neg1 = self.add(ComparisonOp(a, Integer(0, type), ComparisonOp.SLT, line)) + neg2 = self.add(ComparisonOp(b, Integer(0, type), ComparisonOp.SLT, line)) + return self.add(ComparisonOp(neg1, neg2, ComparisonOp.EQ, line)) + def comparison_op(self, lhs: Value, rhs: Value, op: int, line: int) -> Value: return self.add(ComparisonOp(lhs, rhs, op, line)) diff --git a/mypyc/irbuild/main.py b/mypyc/irbuild/main.py index e20872979b7a..9bbb90aad207 100644 --- a/mypyc/irbuild/main.py +++ b/mypyc/irbuild/main.py @@ -57,7 +57,11 @@ def build_ir( options: CompilerOptions, errors: Errors, ) -> ModuleIRs: - """Build IR for a set of modules that have been type-checked by mypy.""" + """Build basic IR for a set of modules that have been type-checked by mypy. 
+ + The returned IR is not complete and requires additional + transformations, such as the insertion of refcount handling. + """ build_type_map(mapper, modules, graph, types, options, errors) singledispatch_info = find_singledispatch_register_impls(modules, errors) diff --git a/mypyc/irbuild/mapper.py b/mypyc/irbuild/mapper.py index 6d6ce1576b54..dddb35230fd5 100644 --- a/mypyc/irbuild/mapper.py +++ b/mypyc/irbuild/mapper.py @@ -32,6 +32,8 @@ bytes_rprimitive, dict_rprimitive, float_rprimitive, + int32_rprimitive, + int64_rprimitive, int_rprimitive, list_rprimitive, none_rprimitive, @@ -96,6 +98,10 @@ def type_to_rtype(self, typ: Type | None) -> RType: return RUnion([inst, object_rprimitive]) else: return inst + elif typ.type.fullname == "mypy_extensions.i64": + return int64_rprimitive + elif typ.type.fullname == "mypy_extensions.i32": + return int32_rprimitive else: return object_rprimitive elif isinstance(typ, TupleType): @@ -110,7 +116,7 @@ def type_to_rtype(self, typ: Type | None) -> RType: elif isinstance(typ, NoneTyp): return none_rprimitive elif isinstance(typ, UnionType): - return RUnion([self.type_to_rtype(item) for item in typ.items]) + return RUnion.make_simplified_union([self.type_to_rtype(item) for item in typ.items]) elif isinstance(typ, AnyType): return object_rprimitive elif isinstance(typ, TypeType): diff --git a/mypyc/irbuild/match.py b/mypyc/irbuild/match.py new file mode 100644 index 000000000000..a1e671911ea5 --- /dev/null +++ b/mypyc/irbuild/match.py @@ -0,0 +1,355 @@ +from contextlib import contextmanager +from typing import Generator, List, Optional, Tuple + +from mypy.nodes import MatchStmt, NameExpr, TypeInfo +from mypy.patterns import ( + AsPattern, + ClassPattern, + MappingPattern, + OrPattern, + Pattern, + SequencePattern, + SingletonPattern, + StarredPattern, + ValuePattern, +) +from mypy.traverser import TraverserVisitor +from mypy.types import Instance, TupleType, get_proper_type +from mypyc.ir.ops import BasicBlock, Value +from mypyc.ir.rtypes import object_rprimitive +from mypyc.irbuild.builder import IRBuilder +from mypyc.primitives.dict_ops import ( + dict_copy, + dict_del_item, + mapping_has_key, + supports_mapping_protocol, +) +from mypyc.primitives.generic_ops import generic_ssize_t_len_op +from mypyc.primitives.list_ops import ( + sequence_get_item, + sequence_get_slice, + supports_sequence_protocol, +) +from mypyc.primitives.misc_ops import fast_isinstance_op, slow_isinstance_op + +# From: https://peps.python.org/pep-0634/#class-patterns +MATCHABLE_BUILTINS = { + "builtins.bool", + "builtins.bytearray", + "builtins.bytes", + "builtins.dict", + "builtins.float", + "builtins.frozenset", + "builtins.int", + "builtins.list", + "builtins.set", + "builtins.str", + "builtins.tuple", +} + + +class MatchVisitor(TraverserVisitor): + builder: IRBuilder + code_block: BasicBlock + next_block: BasicBlock + final_block: BasicBlock + subject: Value + match: MatchStmt + + as_pattern: Optional[AsPattern] = None + + def __init__(self, builder: IRBuilder, match_node: MatchStmt) -> None: + self.builder = builder + + self.code_block = BasicBlock() + self.next_block = BasicBlock() + self.final_block = BasicBlock() + + self.match = match_node + self.subject = builder.accept(match_node.subject) + + def build_match_body(self, index: int) -> None: + self.builder.activate_block(self.code_block) + + guard = self.match.guards[index] + + if guard: + self.code_block = BasicBlock() + + cond = self.builder.accept(guard) + self.builder.add_bool_branch(cond, self.code_block, 
self.next_block) + + self.builder.activate_block(self.code_block) + + self.builder.accept(self.match.bodies[index]) + self.builder.goto(self.final_block) + + def visit_match_stmt(self, m: MatchStmt) -> None: + for i, pattern in enumerate(m.patterns): + self.code_block = BasicBlock() + self.next_block = BasicBlock() + + pattern.accept(self) + + self.build_match_body(i) + self.builder.activate_block(self.next_block) + + self.builder.goto_and_activate(self.final_block) + + def visit_value_pattern(self, pattern: ValuePattern) -> None: + value = self.builder.accept(pattern.expr) + + cond = self.builder.binary_op(self.subject, value, "==", pattern.expr.line) + + self.bind_as_pattern(value) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_or_pattern(self, pattern: OrPattern) -> None: + backup_block = self.next_block + self.next_block = BasicBlock() + + for p in pattern.patterns: + # Hack to ensure the as pattern is bound to each pattern in the + # "or" pattern, but not every subpattern + backup = self.as_pattern + p.accept(self) + self.as_pattern = backup + + self.builder.activate_block(self.next_block) + self.next_block = BasicBlock() + + self.next_block = backup_block + self.builder.goto(self.next_block) + + def visit_class_pattern(self, pattern: ClassPattern) -> None: + # TODO: use faster instance check for native classes (while still + # making sure to account for inheritence) + isinstance_op = ( + fast_isinstance_op + if self.builder.is_builtin_ref_expr(pattern.class_ref) + else slow_isinstance_op + ) + + cond = self.builder.call_c( + isinstance_op, [self.subject, self.builder.accept(pattern.class_ref)], pattern.line + ) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + self.bind_as_pattern(self.subject, new_block=True) + + if pattern.positionals: + if pattern.class_ref.fullname in MATCHABLE_BUILTINS: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + pattern.positionals[0].accept(self) + + return + + node = pattern.class_ref.node + assert isinstance(node, TypeInfo) + + ty = node.names.get("__match_args__") + assert ty + + match_args_type = get_proper_type(ty.type) + assert isinstance(match_args_type, TupleType) + + match_args: List[str] = [] + + for item in match_args_type.items: + proper_item = get_proper_type(item) + assert isinstance(proper_item, Instance) and proper_item.last_known_value + + match_arg = proper_item.last_known_value.value + assert isinstance(match_arg, str) + + match_args.append(match_arg) + + for i, expr in enumerate(pattern.positionals): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: use faster "get_attr" method instead when calling on native or + # builtin objects + positional = self.builder.py_get_attr(self.subject, match_args[i], expr.line) + + with self.enter_subpattern(positional): + expr.accept(self) + + for key, value in zip(pattern.keyword_keys, pattern.keyword_values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + # TODO: same as above "get_attr" comment + attr = self.builder.py_get_attr(self.subject, key, value.line) + + with self.enter_subpattern(attr): + value.accept(self) + + def visit_as_pattern(self, pattern: AsPattern) -> None: + if pattern.pattern: + old_pattern = self.as_pattern + self.as_pattern = pattern + pattern.pattern.accept(self) + self.as_pattern = old_pattern + + elif pattern.name: + target = self.builder.get_assignment_target(pattern.name) + + 
self.builder.assign(target, self.subject, pattern.line) + + self.builder.goto(self.code_block) + + def visit_singleton_pattern(self, pattern: SingletonPattern) -> None: + if pattern.value is None: + obj = self.builder.none_object() + elif pattern.value is True: + obj = self.builder.true() + else: + obj = self.builder.false() + + cond = self.builder.binary_op(self.subject, obj, "is", pattern.line) + + self.builder.add_bool_branch(cond, self.code_block, self.next_block) + + def visit_mapping_pattern(self, pattern: MappingPattern) -> None: + is_dict = self.builder.call_c(supports_mapping_protocol, [self.subject], pattern.line) + + self.builder.add_bool_branch(is_dict, self.code_block, self.next_block) + + keys: List[Value] = [] + + for key, value in zip(pattern.keys, pattern.values): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + key_value = self.builder.accept(key) + keys.append(key_value) + + exists = self.builder.call_c(mapping_has_key, [self.subject, key_value], pattern.line) + + self.builder.add_bool_branch(exists, self.code_block, self.next_block) + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + item = self.builder.gen_method_call( + self.subject, "__getitem__", [key_value], object_rprimitive, pattern.line + ) + + with self.enter_subpattern(item): + value.accept(self) + + if pattern.rest: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + rest = self.builder.call_c(dict_copy, [self.subject], pattern.rest.line) + + target = self.builder.get_assignment_target(pattern.rest) + + self.builder.assign(target, rest, pattern.rest.line) + + for i, key_name in enumerate(keys): + self.builder.call_c(dict_del_item, [rest, key_name], pattern.keys[i].line) + + self.builder.goto(self.code_block) + + def visit_sequence_pattern(self, seq_pattern: SequencePattern) -> None: + star_index, capture, patterns = prep_sequence_pattern(seq_pattern) + + is_list = self.builder.call_c(supports_sequence_protocol, [self.subject], seq_pattern.line) + + self.builder.add_bool_branch(is_list, self.code_block, self.next_block) + + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + actual_len = self.builder.call_c(generic_ssize_t_len_op, [self.subject], seq_pattern.line) + min_len = len(patterns) + + is_long_enough = self.builder.binary_op( + actual_len, + self.builder.load_int(min_len), + "==" if star_index is None else ">=", + seq_pattern.line, + ) + + self.builder.add_bool_branch(is_long_enough, self.code_block, self.next_block) + + for i, pattern in enumerate(patterns): + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + if star_index is not None and i >= star_index: + current = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - i), "-", pattern.line + ) + + else: + current = self.builder.load_int(i) + + item = self.builder.call_c(sequence_get_item, [self.subject, current], pattern.line) + + with self.enter_subpattern(item): + pattern.accept(self) + + if capture and star_index is not None: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + capture_end = self.builder.binary_op( + actual_len, self.builder.load_int(min_len - star_index), "-", capture.line + ) + + rest = self.builder.call_c( + sequence_get_slice, + [self.subject, self.builder.load_int(star_index), capture_end], + capture.line, + ) + + target = self.builder.get_assignment_target(capture) + self.builder.assign(target, rest, capture.line) + + 
self.builder.goto(self.code_block) + + def bind_as_pattern(self, value: Value, new_block: bool = False) -> None: + if self.as_pattern and self.as_pattern.pattern and self.as_pattern.name: + if new_block: + self.builder.activate_block(self.code_block) + self.code_block = BasicBlock() + + target = self.builder.get_assignment_target(self.as_pattern.name) + self.builder.assign(target, value, self.as_pattern.pattern.line) + + self.as_pattern = None + + if new_block: + self.builder.goto(self.code_block) + + @contextmanager + def enter_subpattern(self, subject: Value) -> Generator[None, None, None]: + old_subject = self.subject + self.subject = subject + yield + self.subject = old_subject + + +def prep_sequence_pattern( + seq_pattern: SequencePattern, +) -> Tuple[Optional[int], Optional[NameExpr], List[Pattern]]: + star_index: Optional[int] = None + capture: Optional[NameExpr] = None + patterns: List[Pattern] = [] + + for i, pattern in enumerate(seq_pattern.patterns): + if isinstance(pattern, StarredPattern): + star_index = i + capture = pattern.capture + + else: + patterns.append(pattern) + + return star_index, capture, patterns diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py index 7d52dc8da57c..d99453955002 100644 --- a/mypyc/irbuild/prebuildvisitor.py +++ b/mypyc/irbuild/prebuildvisitor.py @@ -162,12 +162,10 @@ def visit_symbol_node(self, symbol: SymbolNode) -> None: def is_parent(self, fitem: FuncItem, child: FuncItem) -> bool: # Check if child is nested within fdef (possibly indirectly # within multiple nested functions). - if child in self.nested_funcs: - parent = self.nested_funcs[child] - if parent == fitem: - return True - return self.is_parent(fitem, parent) - return False + if child not in self.nested_funcs: + return False + parent = self.nested_funcs[child] + return parent == fitem or self.is_parent(fitem, parent) def add_free_variable(self, symbol: SymbolNode) -> None: # Find the function where the symbol was (likely) first declared, diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index e40dfa0d7c02..b3d10887ce21 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -14,7 +14,7 @@ from __future__ import annotations from collections import defaultdict -from typing import DefaultDict, Iterable, NamedTuple, Tuple +from typing import Iterable, NamedTuple, Tuple from mypy.build import Graph from mypy.nodes import ( @@ -50,7 +50,7 @@ RuntimeArg, ) from mypyc.ir.ops import DeserMaps -from mypyc.ir.rtypes import RInstance, dict_rprimitive, tuple_rprimitive +from mypyc.ir.rtypes import RInstance, RType, dict_rprimitive, none_rprimitive, tuple_rprimitive from mypyc.irbuild.mapper import Mapper from mypyc.irbuild.util import ( get_func_def, @@ -60,6 +60,7 @@ is_trait, ) from mypyc.options import CompilerOptions +from mypyc.sametype import is_same_type def build_type_map( @@ -98,6 +99,12 @@ def build_type_map( else: prepare_non_ext_class_def(module.path, module.fullname, cdef, errors, mapper) + # Prepare implicit attribute accessors as needed if an attribute overrides a property. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + if class_ir.is_ext_class: + prepare_implicit_property_accessors(cdef.info, class_ir, module.fullname, mapper) + # Collect all the functions also. We collect from the symbol table # so that we can easily pick out the right copy of a function that # is conditionally defined. 
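To make the new "incompatible attribute definitions" check (added in the hunk just below) concrete, here is a hedged sketch; the class names are invented, and it assumes a mypy_extensions version that ships the i64 native int type, which mypy treats as interchangeable with int at type-check time, so only the compiler-level check catches the mismatch in native representation.

```python
# Sketch only (invented names). mypy itself accepts this override, because
# i64 and int are interchangeable for the type checker, but their native
# representations differ, so compilation now fails with an error like:
#   Type of "total" is incompatible with definition in class "Stats"
from mypy_extensions import i64


class Stats:
    total: int


class FastStats(Stats):
    total: i64  # different RType than in the base class
```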
@@ -106,6 +113,24 @@ def build_type_map( prepare_func_def(module.fullname, None, func, mapper) # TODO: what else? + # Check for incompatible attribute definitions that were not + # flagged by mypy but can't be supported when compiling. + for module, cdef in classes: + class_ir = mapper.type_to_ir[cdef.info] + for attr in class_ir.attributes: + for base_ir in class_ir.mro[1:]: + if attr in base_ir.attributes: + if not is_same_type(class_ir.attributes[attr], base_ir.attributes[attr]): + node = cdef.info.names[attr].node + assert node is not None + kind = "trait" if base_ir.is_trait else "class" + errors.error( + f'Type of "{attr}" is incompatible with ' + f'definition in {kind} "{base_ir.name}"', + module.path, + node.line, + ) + def is_from_module(node: SymbolNode, module: MypyFile) -> bool: return node.fullname == module.fullname + "." + node.name @@ -168,6 +193,8 @@ def prepare_method_def( # works correctly. decl.name = PROPSET_PREFIX + decl.name decl.is_prop_setter = True + # Making the argument implicitly positional-only avoids unnecessary glue methods + decl.sig.args[1].pos_only = True ir.method_decls[PROPSET_PREFIX + node.name] = decl if node.func.is_property: @@ -178,15 +205,20 @@ def prepare_method_def( def is_valid_multipart_property_def(prop: OverloadedFuncDef) -> bool: # Checks to ensure supported property decorator semantics - if len(prop.items) == 2: - getter = prop.items[0] - setter = prop.items[1] - if isinstance(getter, Decorator) and isinstance(setter, Decorator): - if getter.func.is_property and len(setter.decorators) == 1: - if isinstance(setter.decorators[0], MemberExpr): - if setter.decorators[0].name == "setter": - return True - return False + if len(prop.items) != 2: + return False + + getter = prop.items[0] + setter = prop.items[1] + + return ( + isinstance(getter, Decorator) + and isinstance(setter, Decorator) + and getter.func.is_property + and len(setter.decorators) == 1 + and isinstance(setter.decorators[0], MemberExpr) + and setter.decorators[0].name == "setter" + ) def can_subclass_builtin(builtin_base: str) -> bool: @@ -207,6 +239,11 @@ def can_subclass_builtin(builtin_base: str) -> bool: def prepare_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper ) -> None: + """Populate the interface-level information in a class IR. + + This includes attribute and method declarations, and the MRO, among other things, but + method bodies are generated in a later pass. + """ ir = mapper.type_to_ir[cdef.info] info = cdef.info @@ -218,8 +255,72 @@ def prepare_class_def( # Supports copy.copy and pickle (including subclasses) ir._serializable = True - # We sort the table for determinism here on Python 3.5 - for name, node in sorted(info.names.items()): + # Check for subclassing from builtin types + for cls in info.mro: + # Special case exceptions and dicts + # XXX: How do we handle *other* things?? + if cls.fullname == "builtins.BaseException": + ir.builtin_base = "PyBaseExceptionObject" + elif cls.fullname == "builtins.dict": + ir.builtin_base = "PyDictObject" + elif cls.fullname.startswith("builtins."): + if not can_subclass_builtin(cls.fullname): + # Note that if we try to subclass a C extension class that + # isn't in builtins, bad things will happen and we won't + # catch it here! But this should catch a lot of the most + # common pitfalls. 
+ errors.error( + "Inheriting from most builtin types is unimplemented", path, cdef.line + ) + + # Set up the parent class + bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] + if len(bases) > 1 and any(not c.is_trait for c in bases) and bases[0].is_trait: + # If the first base is a non-trait, don't ever error here. While it is correct + # to error if a trait comes before the next non-trait base (e.g. non-trait, trait, + # non-trait), it's pointless, confusing noise from the bigger issue: multiple + # inheritance is *not* supported. + errors.error("Non-trait base must appear first in parent list", path, cdef.line) + ir.traits = [c for c in bases if c.is_trait] + + mro = [] # All mypyc base classes + base_mro = [] # Non-trait mypyc base classes + for cls in info.mro: + if cls not in mapper.type_to_ir: + if cls.fullname != "builtins.object": + ir.inherits_python = True + continue + base_ir = mapper.type_to_ir[cls] + if not base_ir.is_trait: + base_mro.append(base_ir) + mro.append(base_ir) + + if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: + ir.inherits_python = True + + base_idx = 1 if not ir.is_trait else 0 + if len(base_mro) > base_idx: + ir.base = base_mro[base_idx] + ir.mro = mro + ir.base_mro = base_mro + + prepare_methods_and_attributes(cdef, ir, path, module_name, errors, mapper) + prepare_init_method(cdef, ir, module_name, mapper) + + for base in bases: + if base.children is not None: + base.children.append(ir) + + if is_dataclass(cdef): + ir.is_augmented = True + + +def prepare_methods_and_attributes( + cdef: ClassDef, ir: ClassIR, path: str, module_name: str, errors: Errors, mapper: Mapper +) -> None: + """Populate attribute and method declarations.""" + info = cdef.info + for name, node in info.names.items(): # Currently all plugin generated methods are dummies and not included. if node.plugin_generated: continue @@ -227,7 +328,15 @@ def prepare_class_def( if isinstance(node.node, Var): assert node.node.type, "Class member %s missing type" % name if not node.node.is_classvar and name not in ("__slots__", "__deletable__"): - ir.attributes[name] = mapper.type_to_rtype(node.node.type) + attr_rtype = mapper.type_to_rtype(node.node.type) + if ir.is_trait and attr_rtype.error_overlap: + # Traits don't have attribute definedness bitmaps, so use + # property accessor methods to access attributes that need them. + # We will generate accessor implementations that use the class bitmap + # for any concrete subclasses. + add_getter_declaration(ir, name, attr_rtype, module_name) + add_setter_declaration(ir, name, attr_rtype, module_name) + ir.attributes[name] = attr_rtype elif isinstance(node.node, (FuncDef, Decorator)): prepare_method_def(ir, module_name, cdef, mapper, node.node) elif isinstance(node.node, OverloadedFuncDef): @@ -244,27 +353,86 @@ def prepare_class_def( assert node.node.impl prepare_method_def(ir, module_name, cdef, mapper, node.node.impl) - # Check for subclassing from builtin types - for cls in info.mro: - # Special case exceptions and dicts - # XXX: How do we handle *other* things?? - if cls.fullname == "builtins.BaseException": - ir.builtin_base = "PyBaseExceptionObject" - elif cls.fullname == "builtins.dict": - ir.builtin_base = "PyDictObject" - elif cls.fullname.startswith("builtins."): - if not can_subclass_builtin(cls.fullname): - # Note that if we try to subclass a C extension class that - # isn't in builtins, bad things will happen and we won't - # catch it here! 
But this should catch a lot of the most - # common pitfalls. - errors.error( - "Inheriting from most builtin types is unimplemented", path, cdef.line - ) - if ir.builtin_base: ir.attributes.clear() + +def prepare_implicit_property_accessors( + info: TypeInfo, ir: ClassIR, module_name: str, mapper: Mapper +) -> None: + concrete_attributes = set() + for base in ir.base_mro: + for name, attr_rtype in base.attributes.items(): + concrete_attributes.add(name) + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + for base in ir.mro[1:]: + if base.is_trait: + for name, attr_rtype in base.attributes.items(): + if name not in concrete_attributes: + add_property_methods_for_attribute_if_needed( + info, ir, name, attr_rtype, module_name, mapper + ) + + +def add_property_methods_for_attribute_if_needed( + info: TypeInfo, + ir: ClassIR, + attr_name: str, + attr_rtype: RType, + module_name: str, + mapper: Mapper, +) -> None: + """Add getter and/or setter for attribute if defined as property in a base class. + + Only add declarations. The body IR will be synthesized later during irbuild. + """ + for base in info.mro[1:]: + if base in mapper.type_to_ir: + base_ir = mapper.type_to_ir[base] + n = base.names.get(attr_name) + if n is None: + continue + node = n.node + if isinstance(node, Decorator) and node.name not in ir.method_decls: + # Defined as a read-only property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + elif isinstance(node, OverloadedFuncDef) and is_valid_multipart_property_def(node): + # Defined as a read-write property in base class/trait + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + elif base_ir.is_trait and attr_rtype.error_overlap: + add_getter_declaration(ir, attr_name, attr_rtype, module_name) + add_setter_declaration(ir, attr_name, attr_rtype, module_name) + + +def add_getter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + sig = FuncSignature([self_arg], attr_rtype) + decl = FuncDecl(attr_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_getter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[attr_name] = decl + ir.property_types[attr_name] = attr_rtype # TODO: Needed?? 
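As a hedged illustration of what these getter/setter declarations are for (names below are invented, and the example assumes the mypy_extensions `trait` decorator and i64 type): a trait attribute whose native type has overlapping error values cannot be read directly through the trait, so access through the trait type goes via the implicit accessors declared here, with bodies synthesized later for each concrete subclass.

```python
# Sketch only (invented names). The trait has no attribute-definedness
# bitmap, so reads and writes of `count` through the trait type dispatch to
# the implicit getter/setter; concrete subclasses get synthesized bodies
# that consult their own bitmap.
from mypy_extensions import i64, trait


@trait
class Counter:
    count: i64


class HitCounter(Counter):
    def __init__(self) -> None:
        self.count = 0


def bump(c: Counter) -> i64:
    c.count += 1  # accessor calls when `c` is typed as the trait
    return c.count
```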
+ + +def add_setter_declaration( + ir: ClassIR, attr_name: str, attr_rtype: RType, module_name: str +) -> None: + self_arg = RuntimeArg("self", RInstance(ir), pos_only=True) + value_arg = RuntimeArg("value", attr_rtype, pos_only=True) + sig = FuncSignature([self_arg, value_arg], none_rprimitive) + setter_name = PROPSET_PREFIX + attr_name + decl = FuncDecl(setter_name, ir.name, module_name, sig, FUNC_NORMAL) + decl.is_prop_setter = True + decl.implicit = True # Triggers synthesization + ir.method_decls[setter_name] = decl + + +def prepare_init_method(cdef: ClassDef, ir: ClassIR, module_name: str, mapper: Mapper) -> None: # Set up a constructor decl init_node = cdef.info["__init__"].node if not ir.is_trait and not ir.builtin_base and isinstance(init_node, FuncDef): @@ -288,44 +456,11 @@ def prepare_class_def( init_sig.ret_type, ) - ctor_sig = FuncSignature(init_sig.args[1:], RInstance(ir)) + last_arg = len(init_sig.args) - init_sig.num_bitmap_args + ctor_sig = FuncSignature(init_sig.args[1:last_arg], RInstance(ir)) ir.ctor = FuncDecl(cdef.name, None, module_name, ctor_sig) mapper.func_to_decl[cdef.info] = ir.ctor - # Set up the parent class - bases = [mapper.type_to_ir[base.type] for base in info.bases if base.type in mapper.type_to_ir] - if not all(c.is_trait for c in bases[1:]): - errors.error("Non-trait bases must appear first in parent list", path, cdef.line) - ir.traits = [c for c in bases if c.is_trait] - - mro = [] - base_mro = [] - for cls in info.mro: - if cls not in mapper.type_to_ir: - if cls.fullname != "builtins.object": - ir.inherits_python = True - continue - base_ir = mapper.type_to_ir[cls] - if not base_ir.is_trait: - base_mro.append(base_ir) - mro.append(base_ir) - - if cls.defn.removed_base_type_exprs or not base_ir.is_ext_class: - ir.inherits_python = True - - base_idx = 1 if not ir.is_trait else 0 - if len(base_mro) > base_idx: - ir.base = base_mro[base_idx] - ir.mro = mro - ir.base_mro = base_mro - - for base in bases: - if base.children is not None: - base.children.append(ir) - - if is_dataclass(cdef): - ir.is_augmented = True - def prepare_non_ext_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper @@ -379,7 +514,7 @@ def __init__(self, errors: Errors) -> None: super().__init__() # Map of main singledispatch function to list of registered implementations - self.singledispatch_impls: DefaultDict[FuncDef, list[RegisterImplInfo]] = defaultdict(list) + self.singledispatch_impls: defaultdict[FuncDef, list[RegisterImplInfo]] = defaultdict(list) # Map of decorated function to the indices of any decorators to remove self.decorators_to_remove: dict[FuncDef, list[int]] = {} diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index d09d1bd05687..8cb24c5b47da 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -32,14 +32,32 @@ TupleExpr, ) from mypy.types import AnyType, TypeOfAny -from mypyc.ir.ops import BasicBlock, Integer, RaiseStandardError, Register, Unreachable, Value +from mypyc.ir.ops import ( + BasicBlock, + Extend, + Integer, + RaiseStandardError, + Register, + Truncate, + Unreachable, + Value, +) from mypyc.ir.rtypes import ( + RInstance, RTuple, RType, bool_rprimitive, c_int_rprimitive, dict_rprimitive, + int32_rprimitive, + int64_rprimitive, + int_rprimitive, + is_bool_rprimitive, is_dict_rprimitive, + is_fixed_width_rtype, + is_int32_rprimitive, + is_int64_rprimitive, + is_int_rprimitive, is_list_rprimitive, list_rprimitive, set_rprimitive, @@ -138,6 +156,31 @@ def 
translate_globals(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Va return None +@specialize_function("builtins.abs") +@specialize_function("builtins.int") +@specialize_function("builtins.float") +@specialize_function("builtins.complex") +@specialize_function("mypy_extensions.i64") +@specialize_function("mypy_extensions.i32") +def translate_builtins_with_unary_dunder( + builder: IRBuilder, expr: CallExpr, callee: RefExpr +) -> Value | None: + """Specialize calls on native classes that implement the associated dunder.""" + if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS] and isinstance(callee, NameExpr): + arg = expr.args[0] + arg_typ = builder.node_type(arg) + shortname = callee.fullname.split(".")[1] + if shortname in ("i64", "i32"): + method = "__int__" + else: + method = f"__{shortname}__" + if isinstance(arg_typ, RInstance) and arg_typ.class_ir.has_method(method): + obj = builder.accept(arg) + return builder.gen_method_call(obj, method, [], None, expr.line) + + return None + + @specialize_function("builtins.len") def translate_len(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: if len(expr.args) == 1 and expr.arg_kinds == [ARG_POS]: @@ -626,3 +669,62 @@ def translate_fstring(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Va return join_formatted_strings(builder, None, substitutions, expr.line) return None + + +@specialize_function("mypy_extensions.i64") +def translate_i64(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_int64_rprimitive(arg_type): + return builder.accept(arg) + elif is_int32_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Extend(val, int64_rprimitive, signed=True, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + return builder.coerce(val, int64_rprimitive, expr.line) + return None + + +@specialize_function("mypy_extensions.i32") +def translate_i32(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_int32_rprimitive(arg_type): + return builder.accept(arg) + elif is_int64_rprimitive(arg_type): + val = builder.accept(arg) + return builder.add(Truncate(val, int32_rprimitive, line=expr.line)) + elif is_int_rprimitive(arg_type) or is_bool_rprimitive(arg_type): + val = builder.accept(arg) + return builder.coerce(val, int32_rprimitive, expr.line) + return None + + +@specialize_function("builtins.int") +def translate_int(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if ( + is_bool_rprimitive(arg_type) + or is_int_rprimitive(arg_type) + or is_fixed_width_rtype(arg_type) + ): + src = builder.accept(arg) + return builder.coerce(src, int_rprimitive, expr.line) + return None + + +@specialize_function("builtins.bool") +def translate_bool(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value | None: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + src = builder.accept(arg) + return builder.builder.bool_value(src) diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index 371a305e67b9..b9754ba1a147 100644 --- 
a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -28,6 +28,7 @@ ImportFrom, ListExpr, Lvalue, + MatchStmt, OperatorAssignmentStmt, RaiseStmt, ReturnStmt, @@ -49,6 +50,7 @@ Integer, LoadAddress, LoadErrorValue, + MethodCall, RaiseStandardError, Register, Return, @@ -60,6 +62,7 @@ RInstance, exc_rtuple, is_tagged, + none_rprimitive, object_pointer_rprimitive, object_rprimitive, ) @@ -99,6 +102,8 @@ yield_from_except_op, ) +from .match import MatchVisitor + GenFunc = Callable[[], None] ValueGenFunc = Callable[[], Value] @@ -616,6 +621,8 @@ def transform_try_stmt(builder: IRBuilder, t: TryStmt) -> None: # constructs that we compile separately. When we have a # try/except/else/finally, we treat the try/except/else as the # body of a try/finally block. + if t.is_star: + builder.error("Exception groups and except* cannot be compiled yet", t.line) if t.finally_body: def transform_try_body() -> None: @@ -652,14 +659,45 @@ def transform_with( al = "a" if is_async else "" mgr_v = builder.accept(expr) - typ = builder.call_c(type_op, [mgr_v], line) - exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) - value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + is_native = isinstance(mgr_v.type, RInstance) + if is_native: + value = builder.add(MethodCall(mgr_v, f"__{al}enter__", args=[], line=line)) + exit_ = None + else: + typ = builder.call_c(type_op, [mgr_v], line) + exit_ = builder.maybe_spill(builder.py_get_attr(typ, f"__{al}exit__", line)) + value = builder.py_call(builder.py_get_attr(typ, f"__{al}enter__", line), [mgr_v], line) + mgr = builder.maybe_spill(mgr_v) exc = builder.maybe_spill_assignable(builder.true()) if is_async: value = emit_await(builder, value, line) + def maybe_natively_call_exit(exc_info: bool) -> Value: + if exc_info: + args = get_sys_exc_info(builder) + else: + none = builder.none_object() + args = [none, none, none] + + if is_native: + assert isinstance(mgr_v.type, RInstance) + exit_val = builder.gen_method_call( + builder.read(mgr), + f"__{al}exit__", + arg_values=args, + line=line, + result_type=none_rprimitive, + ) + else: + assert exit_ is not None + exit_val = builder.py_call(builder.read(exit_), [builder.read(mgr)] + args, line) + + if is_async: + return emit_await(builder, exit_val, line) + else: + return exit_val + def try_body() -> None: if target: builder.assign(builder.get_assignment_target(target), value, line) @@ -668,13 +706,7 @@ def try_body() -> None: def except_body() -> None: builder.assign(exc, builder.false(), line) out_block, reraise_block = BasicBlock(), BasicBlock() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr)] + get_sys_exc_info(builder), line - ) - if is_async: - exit_val = emit_await(builder, exit_val, line) - - builder.add_bool_branch(exit_val, out_block, reraise_block) + builder.add_bool_branch(maybe_natively_call_exit(exc_info=True), out_block, reraise_block) builder.activate_block(reraise_block) builder.call_c(reraise_exception_op, [], NO_TRACEBACK_LINE_NO) builder.add(Unreachable()) @@ -684,13 +716,8 @@ def finally_body() -> None: out_block, exit_block = BasicBlock(), BasicBlock() builder.add(Branch(builder.read(exc), exit_block, out_block, Branch.BOOL)) builder.activate_block(exit_block) - none = builder.none_object() - exit_val = builder.py_call( - builder.read(exit_), [builder.read(mgr), none, none, none], line - ) - if is_async: - emit_await(builder, exit_val, line) + maybe_natively_call_exit(exc_info=False) 
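For context on the native path introduced in `transform_with` above: when the context manager's static type is a native class (`RInstance`), `__enter__`/`__exit__` can be invoked as direct method calls instead of being looked up on `type(mgr)` and called through the C API. A hedged sketch (hypothetical class, not part of the patch) of code that would take this fast path once compiled:

    from types import TracebackType
    from typing import Optional

    class Resource:
        def __enter__(self) -> "Resource":
            print("acquire")
            return self

        def __exit__(
            self,
            typ: Optional[type],
            value: Optional[BaseException],
            tb: Optional[TracebackType],
        ) -> None:
            # Returning None (falsy) means exceptions are not suppressed.
            print("release")

    def use_resource() -> None:
        # When Resource is compiled, __enter__/__exit__ here become direct
        # MethodCall/gen_method_call operations rather than py_get_attr on
        # type(Resource()) followed by a generic Python call.
        with Resource() as r:
            print("using", r)
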
builder.goto_and_activate(out_block) transform_try_finally_stmt( @@ -896,3 +923,7 @@ def transform_yield_from_expr(builder: IRBuilder, o: YieldFromExpr) -> Value: def transform_await_expr(builder: IRBuilder, o: AwaitExpr) -> Value: return emit_yield_from_or_await(builder, builder.accept(o.expr), o.line, is_await=True) + + +def transform_match_stmt(builder: IRBuilder, m: MatchStmt) -> None: + m.accept(MatchVisitor(builder, m)) diff --git a/mypyc/irbuild/util.py b/mypyc/irbuild/util.py index f50241b96cb3..ed01a59d1214 100644 --- a/mypyc/irbuild/util.py +++ b/mypyc/irbuild/util.py @@ -177,3 +177,13 @@ def is_constant(e: Expression) -> bool: ) ) ) + + +def bytes_from_str(value: str) -> bytes: + """Convert a string representing bytes into actual bytes. + + This is needed because the literal characters of BytesExpr (the + characters inside b'') are stored in BytesExpr.value, whose type is + 'str' not 'bytes'. + """ + return bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape") diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py index dc126d410409..d8725ee04dc5 100644 --- a/mypyc/irbuild/visitor.py +++ b/mypyc/irbuild/visitor.py @@ -131,6 +131,7 @@ transform_import, transform_import_all, transform_import_from, + transform_match_stmt, transform_operator_assignment_stmt, transform_raise_stmt, transform_return_stmt, @@ -242,7 +243,7 @@ def visit_nonlocal_decl(self, stmt: NonlocalDecl) -> None: pass def visit_match_stmt(self, stmt: MatchStmt) -> None: - self.bail("Match statements are not yet supported", stmt.line) + transform_match_stmt(self.builder, stmt) # Expressions diff --git a/mypyc/irbuild/vtable.py b/mypyc/irbuild/vtable.py index a02cd622cee1..2d4f7261e4ca 100644 --- a/mypyc/irbuild/vtable.py +++ b/mypyc/irbuild/vtable.py @@ -40,7 +40,7 @@ def compute_vtable(cls: ClassIR) -> None: for t in [cls] + cls.traits: for fn in itertools.chain(t.methods.values()): # TODO: don't generate a new entry when we overload without changing the type - if fn == cls.get_method(fn.name): + if fn == cls.get_method(fn.name, prefer_method=True): cls.vtable[fn.name] = len(entries) # If the class contains a glue method referring to itself, that is a # shadow glue method to support interpreted subclasses. @@ -60,9 +60,9 @@ def specialize_parent_vtable(cls: ClassIR, parent: ClassIR) -> VTableEntries: for entry in parent.vtable_entries: # Find the original method corresponding to this vtable entry. # (This may not be the method in the entry, if it was overridden.) 
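A note on the `bytes_from_str` helper added in mypyc/irbuild/util.py above: it round-trips the textual form of a bytes literal (stored as a `str` on `BytesExpr.value`) into real `bytes`. A small standalone demonstration; the asserts below only exercise plain Python string/bytes semantics, not mypyc internals:

    def bytes_from_str(value: str) -> bytes:
        # Same expression as the helper above: encode the characters to UTF-8,
        # interpret backslash escapes, then re-encode without mangling
        # non-ASCII code points.
        return bytes(value, "utf8").decode("unicode-escape").encode("raw-unicode-escape")

    assert bytes_from_str(r"\x00abc") == b"\x00abc"
    assert bytes_from_str(r"\n\t") == b"\n\t"
    assert bytes_from_str("plain") == b"plain"
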
- orig_parent_method = entry.cls.get_method(entry.name) + orig_parent_method = entry.cls.get_method(entry.name, prefer_method=True) assert orig_parent_method - method_cls = cls.get_method_and_class(entry.name) + method_cls = cls.get_method_and_class(entry.name, prefer_method=True) if method_cls: child_method, defining_cls = method_cls # TODO: emit a wrapper for __init__ that raises or something diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index cffbbb3e1666..016a6d3ea9e0 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -344,6 +344,7 @@ CPyTagged CPyObject_Hash(PyObject *o); PyObject *CPyObject_GetAttr3(PyObject *v, PyObject *name, PyObject *defl); PyObject *CPyIter_Next(PyObject *iter); PyObject *CPyNumber_Power(PyObject *base, PyObject *index); +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index); PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); @@ -371,6 +372,7 @@ CPyTagged CPyList_Index(PyObject *list, PyObject *obj); PyObject *CPySequence_Multiply(PyObject *seq, CPyTagged t_size); PyObject *CPySequence_RMultiply(CPyTagged t_size, PyObject *seq); PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end); +int CPySequence_Check(PyObject *obj); // Dict operations @@ -402,6 +404,7 @@ PyObject *CPyDict_GetValuesIter(PyObject *dict); tuple_T3CIO CPyDict_NextKey(PyObject *dict_or_iter, CPyTagged offset); tuple_T3CIO CPyDict_NextValue(PyObject *dict_or_iter, CPyTagged offset); tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset); +int CPyMapping_Check(PyObject *obj); // Check that dictionary didn't change size during iteration. static inline char CPyDict_CheckSize(PyObject *dict, CPyTagged size) { @@ -497,13 +500,8 @@ static inline bool CPy_KeepPropagating(void) { } // We want to avoid the public PyErr_GetExcInfo API for these because // it requires a bunch of spurious refcount traffic on the parts of -// the triple we don't care about. Unfortunately the layout of the -// data structure changed in 3.7 so we need to handle that. -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 +// the triple we don't care about. #define CPy_ExcState() PyThreadState_GET()->exc_info -#else -#define CPy_ExcState() PyThreadState_GET() -#endif void CPy_Raise(PyObject *exc); void CPy_Reraise(void); @@ -525,7 +523,7 @@ void CPy_AttributeError(const char *filename, const char *funcname, const char * // Misc operations -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 8 +#if PY_VERSION_HEX >= 0x03080000 #define CPy_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN(op, dealloc) #define CPy_TRASHCAN_END(op) Py_TRASHCAN_END #else @@ -600,7 +598,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples); + const int *tuples, + const int *frozensets); PyObject *CPy_Super(PyObject *builtins, PyObject *self); PyObject *CPy_CallReverseOpMethod(PyObject *left, PyObject *right, const char *op, _Py_Identifier *method); diff --git a/mypyc/lib-rt/dict_ops.c b/mypyc/lib-rt/dict_ops.c index b013a8a5f0b9..c0cc8d5a7f87 100644 --- a/mypyc/lib-rt/dict_ops.c +++ b/mypyc/lib-rt/dict_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_MAPPING +#define Py_TPFLAGS_MAPPING (1 << 6) +#endif + // Dict subclasses like defaultdict override things in interesting // ways, so we don't want to just directly use the dict methods. 
Not // sure if it is actually worth doing all this stuff, but it saves @@ -85,7 +89,7 @@ PyObject *CPyDict_SetDefaultWithEmptyDatatype(PyObject *dict, PyObject *key, int data_type) { PyObject *res = CPyDict_GetItem(dict, key); if (!res) { - // CPyDict_GetItem() would generates an PyExc_KeyError + // CPyDict_GetItem() would generates a PyExc_KeyError // when key is not found. PyErr_Clear(); @@ -436,3 +440,7 @@ tuple_T4CIOO CPyDict_NextItem(PyObject *dict_or_iter, CPyTagged offset) { Py_INCREF(ret.f3); return ret; } + +int CPyMapping_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MAPPING; +} diff --git a/mypyc/lib-rt/generic_ops.c b/mypyc/lib-rt/generic_ops.c index 2f4a7941a6da..260cfec5b360 100644 --- a/mypyc/lib-rt/generic_ops.c +++ b/mypyc/lib-rt/generic_ops.c @@ -41,6 +41,11 @@ PyObject *CPyNumber_Power(PyObject *base, PyObject *index) return PyNumber_Power(base, index, Py_None); } +PyObject *CPyNumber_InPlacePower(PyObject *base, PyObject *index) +{ + return PyNumber_InPlacePower(base, index, Py_None); +} + PyObject *CPyObject_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { PyObject *start_obj = CPyTagged_AsObject(start); PyObject *end_obj = CPyTagged_AsObject(end); diff --git a/mypyc/lib-rt/getargsfast.c b/mypyc/lib-rt/getargsfast.c index afb161e643c7..387deed4399b 100644 --- a/mypyc/lib-rt/getargsfast.c +++ b/mypyc/lib-rt/getargsfast.c @@ -18,9 +18,6 @@ #include #include "CPy.h" -/* None of this is supported on Python 3.6 or earlier */ -#if PY_VERSION_HEX >= 0x03070000 - #define PARSER_INITED(parser) ((parser)->kwtuple != NULL) /* Forward */ @@ -570,5 +567,3 @@ skipitem_fast(const char **p_format, va_list *p_va) *p_format = format; } - -#endif diff --git a/mypyc/lib-rt/int_ops.c b/mypyc/lib-rt/int_ops.c index 21d4d4cc5620..5ea2f65d5776 100644 --- a/mypyc/lib-rt/int_ops.c +++ b/mypyc/lib-rt/int_ops.c @@ -544,7 +544,7 @@ int64_t CPyInt64_Divide(int64_t x, int64_t y) { PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); return CPY_LL_INT_ERROR; } - if (y == -1 && x == -1LL << 63) { + if (y == -1 && x == INT64_MIN) { PyErr_SetString(PyExc_OverflowError, "integer division overflow"); return CPY_LL_INT_ERROR; } @@ -562,7 +562,7 @@ int64_t CPyInt64_Remainder(int64_t x, int64_t y) { return CPY_LL_INT_ERROR; } // Edge case: avoid core dump - if (y == -1 && x == -1LL << 63) { + if (y == -1 && x == INT64_MIN) { return 0; } int64_t d = x % y; @@ -607,7 +607,7 @@ int32_t CPyInt32_Divide(int32_t x, int32_t y) { PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); return CPY_LL_INT_ERROR; } - if (y == -1 && x == -1LL << 31) { + if (y == -1 && x == INT32_MIN) { PyErr_SetString(PyExc_OverflowError, "integer division overflow"); return CPY_LL_INT_ERROR; } @@ -625,7 +625,7 @@ int32_t CPyInt32_Remainder(int32_t x, int32_t y) { return CPY_LL_INT_ERROR; } // Edge case: avoid core dump - if (y == -1 && x == -1LL << 31) { + if (y == -1 && x == INT32_MIN) { return 0; } int32_t d = x % y; diff --git a/mypyc/lib-rt/list_ops.c b/mypyc/lib-rt/list_ops.c index cb72662e22ee..df87228a0d10 100644 --- a/mypyc/lib-rt/list_ops.c +++ b/mypyc/lib-rt/list_ops.c @@ -5,6 +5,10 @@ #include #include "CPy.h" +#ifndef Py_TPFLAGS_SEQUENCE +#define Py_TPFLAGS_SEQUENCE (1 << 5) +#endif + PyObject *CPyList_Build(Py_ssize_t len, ...) 
{ Py_ssize_t i; @@ -325,3 +329,7 @@ PyObject *CPyList_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { } return CPyObject_GetSlice(obj, start, end); } + +int CPySequence_Check(PyObject *obj) { + return Py_TYPE(obj)->tp_flags & Py_TPFLAGS_SEQUENCE; +} diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index 90292ce61073..5fda78704bbc 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -3,6 +3,7 @@ // These are registered in mypyc.primitives.misc_ops. #include +#include #include "CPy.h" PyObject *CPy_GetCoro(PyObject *obj) @@ -285,6 +286,11 @@ PyObject *CPyType_FromTemplate(PyObject *template, Py_XDECREF(dummy_class); +#if PY_MINOR_VERSION == 11 + // This is a hack. Python 3.11 doesn't include good public APIs to work with managed + // dicts, which are the default for heap types. So we try to opt-out until Python 3.12. + t->ht_type.tp_flags &= ~Py_TPFLAGS_MANAGED_DICT; +#endif return (PyObject *)t; error: @@ -529,7 +535,8 @@ int CPyStatics_Initialize(PyObject **statics, const char * const *ints, const double *floats, const double *complex_numbers, - const int *tuples) { + const int *tuples, + const int *frozensets) { PyObject **result = statics; // Start with some hard-coded values *result++ = Py_None; @@ -629,6 +636,24 @@ int CPyStatics_Initialize(PyObject **statics, *result++ = obj; } } + if (frozensets) { + int num = *frozensets++; + while (num-- > 0) { + int num_items = *frozensets++; + PyObject *obj = PyFrozenSet_New(NULL); + if (obj == NULL) { + return -1; + } + for (int i = 0; i < num_items; i++) { + PyObject *item = statics[*frozensets++]; + Py_INCREF(item); + if (PySet_Add(obj, item) == -1) { + return -1; + } + } + *result++ = obj; + } + } return 0; } diff --git a/mypyc/lib-rt/pythonsupport.h b/mypyc/lib-rt/pythonsupport.h index cd66c4cb4df8..8a1159a98853 100644 --- a/mypyc/lib-rt/pythonsupport.h +++ b/mypyc/lib-rt/pythonsupport.h @@ -22,7 +22,6 @@ extern "C" { ///////////////////////////////////////// // Adapted from bltinmodule.c in Python 3.7.0 -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 7 _Py_IDENTIFIER(__mro_entries__); static PyObject* update_bases(PyObject *bases) @@ -96,16 +95,8 @@ update_bases(PyObject *bases) Py_XDECREF(new_bases); return NULL; } -#else -static PyObject* -update_bases(PyObject *bases) -{ - return bases; -} -#endif // From Python 3.7's typeobject.c -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 _Py_IDENTIFIER(__init_subclass__); static int init_subclass(PyTypeObject *type, PyObject *kwds) @@ -134,14 +125,6 @@ init_subclass(PyTypeObject *type, PyObject *kwds) return 0; } -#else -static int -init_subclass(PyTypeObject *type, PyObject *kwds) -{ - return 0; -} -#endif - // Adapted from longobject.c in Python 3.7.0 /* This function adapted from PyLong_AsLongLongAndOverflow, but with @@ -306,7 +289,7 @@ list_count(PyListObject *self, PyObject *value) return CPyTagged_ShortFromSsize_t(count); } -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 8 +#if PY_VERSION_HEX < 0x03080000 static PyObject * _PyDict_GetItemStringWithError(PyObject *v, const char *key) { @@ -321,13 +304,7 @@ _PyDict_GetItemStringWithError(PyObject *v, const char *key) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 6 -/* _PyUnicode_EqualToASCIIString got added in 3.5.3 (argh!) so we can't actually know - * whether it will be present at runtime, so we just assume we don't have it in 3.5. 
*/ -#define CPyUnicode_EqualToASCIIString(x, y) (PyUnicode_CompareWithASCIIString((x), (y)) == 0) -#elif PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 6 #define CPyUnicode_EqualToASCIIString(x, y) _PyUnicode_EqualToASCIIString(x, y) -#endif // Adapted from genobject.c in Python 3.7.2 // Copied because it wasn't in 3.5.2 and it is undocumented anyways. @@ -390,7 +367,7 @@ _CPyDictView_New(PyObject *dict, PyTypeObject *type) } #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >=10 +#if PY_VERSION_HEX >= 0x030A0000 // 3.10 static int _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { PyObject *tmp = NULL; @@ -404,7 +381,7 @@ _CPyObject_HasAttrId(PyObject *v, _Py_Identifier *name) { #define _CPyObject_HasAttrId _PyObject_HasAttrId #endif -#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 9 +#if PY_VERSION_HEX < 0x03090000 // OneArgs and NoArgs functions got added in 3.9 #define _PyObject_CallMethodIdNoArgs(self, name) \ _PyObject_CallMethodIdObjArgs((self), (name), NULL) diff --git a/mypyc/lib-rt/str_ops.c b/mypyc/lib-rt/str_ops.c index 3c0d275fbe39..90b19001f8f0 100644 --- a/mypyc/lib-rt/str_ops.c +++ b/mypyc/lib-rt/str_ops.c @@ -188,7 +188,7 @@ PyObject *CPyStr_GetSlice(PyObject *obj, CPyTagged start, CPyTagged end) { return CPyObject_GetSlice(obj, start, end); } -/* Check if the given string is true (i.e. it's length isn't zero) */ +/* Check if the given string is true (i.e. its length isn't zero) */ bool CPyStr_IsTrue(PyObject *obj) { Py_ssize_t length = PyUnicode_GET_LENGTH(obj); return length != 0; diff --git a/mypyc/options.py b/mypyc/options.py index d554cbed164f..5f0cf12aeefe 100644 --- a/mypyc/options.py +++ b/mypyc/options.py @@ -13,6 +13,7 @@ def __init__( target_dir: str | None = None, include_runtime_files: bool | None = None, capi_version: tuple[int, int] | None = None, + python_version: tuple[int, int] | None = None, ) -> None: self.strip_asserts = strip_asserts self.multi_file = multi_file @@ -28,3 +29,4 @@ def __init__( # binaries are backward compatible even if no recent API # features are used. 
self.capi_version = capi_version or sys.version_info[:2] + self.python_version = python_version diff --git a/mypyc/primitives/dict_ops.py b/mypyc/primitives/dict_ops.py index d1dca5a79e63..9f477d0b7b90 100644 --- a/mypyc/primitives/dict_ops.py +++ b/mypyc/primitives/dict_ops.py @@ -63,7 +63,7 @@ ) # Generic one-argument dict constructor: dict(obj) -function_op( +dict_copy = function_op( name="builtins.dict", arg_types=[object_rprimitive], return_type=dict_rprimitive, @@ -301,3 +301,25 @@ c_function_name="PyDict_Size", error_kind=ERR_NEVER, ) + +# Delete an item from a dict +dict_del_item = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyDict_DelItem", + error_kind=ERR_NEG_INT, +) + +supports_mapping_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPyMapping_Check", + error_kind=ERR_NEVER, +) + +mapping_has_key = custom_op( + arg_types=[object_rprimitive, object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="PyMapping_HasKey", + error_kind=ERR_NEVER, +) diff --git a/mypyc/primitives/generic_ops.py b/mypyc/primitives/generic_ops.py index cdaa94931604..3caec0a9875e 100644 --- a/mypyc/primitives/generic_ops.py +++ b/mypyc/primitives/generic_ops.py @@ -75,6 +75,17 @@ priority=0, ) + +function_op( + name="builtins.divmod", + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyNumber_Divmod", + error_kind=ERR_MAGIC, + priority=0, +) + + for op, funcname in [ ("+=", "PyNumber_InPlaceAdd"), ("-=", "PyNumber_InPlaceSubtract"), @@ -98,14 +109,25 @@ priority=0, ) -binary_op( - name="**", - arg_types=[object_rprimitive, object_rprimitive], - return_type=object_rprimitive, - error_kind=ERR_MAGIC, - c_function_name="CPyNumber_Power", - priority=0, -) +for op, c_function in (("**", "CPyNumber_Power"), ("**=", "CPyNumber_InPlacePower")): + binary_op( + name=op, + arg_types=[object_rprimitive, object_rprimitive], + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) + +for arg_count, c_function in ((2, "CPyNumber_Power"), (3, "PyNumber_Power")): + function_op( + name="builtins.pow", + arg_types=[object_rprimitive] * arg_count, + return_type=object_rprimitive, + error_kind=ERR_MAGIC, + c_function_name=c_function, + priority=0, + ) binary_op( name="in", @@ -145,6 +167,16 @@ priority=0, ) +# abs(obj) +function_op( + name="builtins.abs", + arg_types=[object_rprimitive], + return_type=object_rprimitive, + c_function_name="PyNumber_Absolute", + error_kind=ERR_MAGIC, + priority=0, +) + # obj1[obj2] method_op( name="__getitem__", diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 55ef16ef5466..7eda9bab7e3c 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -35,39 +35,43 @@ unary_op, ) -# These int constructors produce object_rprimitives that then need to be unboxed -# I guess unboxing ourselves would save a check and branch though? - -# Get the type object for 'builtins.int'. -# For ordinary calls to int() we use a load_address to the type -load_address_op(name="builtins.int", type=object_rprimitive, src="PyLong_Type") - -# int(float). We could do a bit better directly. -function_op( - name="builtins.int", - arg_types=[float_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromFloat", - error_kind=ERR_MAGIC, -) +# Constructors for builtins.int and native int types have the same behavior. 
In +# interpreted mode, native int types are just aliases to 'int'. +for int_name in ("builtins.int", "mypy_extensions.i64", "mypy_extensions.i32"): + # These int constructors produce object_rprimitives that then need to be unboxed + # I guess unboxing ourselves would save a check and branch though? + + # Get the type object for 'builtins.int' or a native int type. + # For ordinary calls to int() we use a load_address to the type. + # Native ints don't have a separate type object -- we just use 'builtins.int'. + load_address_op(name=int_name, type=object_rprimitive, src="PyLong_Type") + + # int(float). We could do a bit better directly. + function_op( + name=int_name, + arg_types=[float_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromFloat", + error_kind=ERR_MAGIC, + ) -# int(string) -function_op( - name="builtins.int", - arg_types=[str_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStr", - error_kind=ERR_MAGIC, -) + # int(string) + function_op( + name=int_name, + arg_types=[str_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStr", + error_kind=ERR_MAGIC, + ) -# int(string, base) -function_op( - name="builtins.int", - arg_types=[str_rprimitive, int_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromStrWithBase", - error_kind=ERR_MAGIC, -) + # int(string, base) + function_op( + name=int_name, + arg_types=[str_rprimitive, int_rprimitive], + return_type=object_rprimitive, + c_function_name="CPyLong_FromStrWithBase", + error_kind=ERR_MAGIC, + ) # str(int) int_to_str_op = function_op( @@ -160,15 +164,11 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription: # c_func_description: the C function to call when operands are tagged integers # c_func_negated: whether to negate the C function call's result # c_func_swap_operands: whether to swap lhs and rhs when call the function -IntComparisonOpDescription = NamedTuple( - "IntComparisonOpDescription", - [ - ("binary_op_variant", int), - ("c_func_description", CFunctionDescription), - ("c_func_negated", bool), - ("c_func_swap_operands", bool), - ], -) +class IntComparisonOpDescription(NamedTuple): + binary_op_variant: int + c_func_description: CFunctionDescription + c_func_negated: bool + c_func_swap_operands: bool # Equals operation on two boxed tagged integers diff --git a/mypyc/primitives/list_ops.py b/mypyc/primitives/list_ops.py index c729e264fc14..7fe3157f3a38 100644 --- a/mypyc/primitives/list_ops.py +++ b/mypyc/primitives/list_ops.py @@ -277,3 +277,24 @@ c_function_name="CPyList_GetSlice", error_kind=ERR_MAGIC, ) + +supports_sequence_protocol = custom_op( + arg_types=[object_rprimitive], + return_type=c_int_rprimitive, + c_function_name="CPySequence_Check", + error_kind=ERR_NEVER, +) + +sequence_get_item = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetItem", + error_kind=ERR_NEVER, +) + +sequence_get_slice = custom_op( + arg_types=[object_rprimitive, c_pyssize_t_rprimitive, c_pyssize_t_rprimitive], + return_type=object_rprimitive, + c_function_name="PySequence_GetSlice", + error_kind=ERR_MAGIC, +) diff --git a/mypyc/primitives/registry.py b/mypyc/primitives/registry.py index d7d171b72cca..1e2cf2695ee7 100644 --- a/mypyc/primitives/registry.py +++ b/mypyc/primitives/registry.py @@ -47,29 +47,27 @@ # is only used for primitives. We translate it away during IR building. 
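On the `NamedTuple` change in the `IntComparisonOpDescription` hunk above (and the `CFunctionDescription` hunk that follows): the class-based spelling is behaviourally equivalent to the older functional form, just easier to read and annotate. A minimal illustration with hypothetical fields, not the real op descriptions:

    from typing import NamedTuple

    # Functional form, as previously used for the op descriptions.
    OpInfo = NamedTuple("OpInfo", [("name", str), ("priority", int)])

    # Class form, as used after this change -- same runtime behaviour.
    class OpInfoC(NamedTuple):
        name: str
        priority: int

    # Both produce plain tuples at runtime and compare equal.
    assert OpInfo("add", 1) == OpInfoC("add", 1) == ("add", 1)
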
ERR_NEG_INT: Final = 10 -CFunctionDescription = NamedTuple( - "CFunctionDescription", - [ - ("name", str), - ("arg_types", List[RType]), - ("return_type", RType), - ("var_arg_type", Optional[RType]), - ("truncated_type", Optional[RType]), - ("c_function_name", str), - ("error_kind", int), - ("steals", StealsDescription), - ("is_borrowed", bool), - ("ordering", Optional[List[int]]), - ("extra_int_constants", List[Tuple[int, RType]]), - ("priority", int), - ], -) + +class CFunctionDescription(NamedTuple): + name: str + arg_types: List[RType] + return_type: RType + var_arg_type: Optional[RType] + truncated_type: Optional[RType] + c_function_name: str + error_kind: int + steals: StealsDescription + is_borrowed: bool + ordering: Optional[List[int]] + extra_int_constants: List[Tuple[int, RType]] + priority: int # A description for C load operations including LoadGlobal and LoadAddress -LoadAddressDescription = NamedTuple( - "LoadAddressDescription", [("name", str), ("type", RType), ("src", str)] -) # name of the target to load +class LoadAddressDescription(NamedTuple): + name: str + type: RType + src: str # name of the target to load # CallC op for method call(such as 'str.join') diff --git a/mypyc/primitives/set_ops.py b/mypyc/primitives/set_ops.py index 801fdad34ea4..fcfb7847dc7d 100644 --- a/mypyc/primitives/set_ops.py +++ b/mypyc/primitives/set_ops.py @@ -54,7 +54,7 @@ ) # item in set -binary_op( +set_in_op = binary_op( name="in", arg_types=[object_rprimitive, set_rprimitive], return_type=c_int_rprimitive, diff --git a/mypyc/sametype.py b/mypyc/sametype.py index a3cfd5c08059..1b811d4e9041 100644 --- a/mypyc/sametype.py +++ b/mypyc/sametype.py @@ -35,7 +35,9 @@ def is_same_method_signature(a: FuncSignature, b: FuncSignature) -> bool: len(a.args) == len(b.args) and is_same_type(a.ret_type, b.ret_type) and all( - is_same_type(t1.type, t2.type) and t1.name == t2.name + is_same_type(t1.type, t2.type) + and ((t1.pos_only and t2.pos_only) or t1.name == t2.name) + and t1.optional == t2.optional for t1, t2 in zip(a.args[1:], b.args[1:]) ) ) diff --git a/mypyc/test-data/commandline.test b/mypyc/test-data/commandline.test index cfd0d708bbda..672e879fbe1e 100644 --- a/mypyc/test-data/commandline.test +++ b/mypyc/test-data/commandline.test @@ -150,7 +150,7 @@ class PureTrait: pass @trait -class Trait1(Concrete1): +class Trait1: pass class Concrete2: @@ -164,14 +164,28 @@ class Trait2(Concrete2): class NonExt(Concrete1): # E: Non-extension classes may not inherit from extension classes pass -class Nope(Trait1, Concrete2): # E: Non-trait bases must appear first in parent list # E: Non-trait MRO must be linear + +class NopeMultipleInheritance(Concrete1, Concrete2): # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeMultipleInheritanceAndBadOrder(Concrete1, Trait1, Concrete2): # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeMultipleInheritanceAndBadOrder2(Concrete1, Concrete2, Trait1): # E: Multiple inheritance is not supported (except for traits) + pass + +class NopeMultipleInheritanceAndBadOrder3(Trait1, Concrete1, Concrete2): # E: Non-trait base must appear first in parent list # E: Multiple inheritance is not supported (except for traits) pass +class NopeBadOrder(Trait1, Concrete2): # E: Non-trait base must appear first in parent list + pass + + @decorator class NonExt2: @property # E: Property setters not supported in non-extension classes def test(self) -> int: - pass + return 0 @test.setter def test(self, x: int) -> None: @@ -223,3 
+237,9 @@ def h(arg: str) -> None: @a.register def i(arg: Foo) -> None: pass + +[case testOnlyWarningOutput] +# cmd: test.py + +[file test.py] +names = (str(v) for v in [1, 2, 3]) # W: Treating generator comprehension as list diff --git a/mypyc/test-data/exceptions.test b/mypyc/test-data/exceptions.test index 8b186e234c5e..187551249676 100644 --- a/mypyc/test-data/exceptions.test +++ b/mypyc/test-data/exceptions.test @@ -514,3 +514,128 @@ L13: L14: dec_ref r9 goto L8 + +[case testExceptionWithOverlappingErrorValue] +from mypy_extensions import i64 + +def f() -> i64: + return 0 + +def g() -> i64: + return f() +[out] +def f(): +L0: + return 0 +def g(): + r0 :: int64 + r1 :: bit + r2 :: object + r3 :: int64 +L0: + r0 = f() + r1 = r0 == -113 + if r1 goto L2 else goto L1 :: bool +L1: + return r0 +L2: + r2 = PyErr_Occurred() + if not is_error(r2) goto L3 (error at g:7) else goto L1 +L3: + r3 = :: int64 + return r3 + +[case testExceptionWithNativeAttributeGetAndSet] +class C: + def __init__(self, x: int) -> None: + self.x = x + +def foo(c: C, x: int) -> None: + c.x = x - c.x +[out] +def C.__init__(self, x): + self :: __main__.C + x :: int +L0: + inc_ref x :: int + self.x = x + return 1 +def foo(c, x): + c :: __main__.C + x, r0, r1 :: int + r2 :: bool +L0: + r0 = borrow c.x + r1 = CPyTagged_Subtract(x, r0) + c.x = r1 + return 1 + +[case testExceptionWithLowLevelIntAttribute] +from mypy_extensions import i32, i64 + +class C: + def __init__(self, x: i32, y: i64) -> None: + self.x = x + self.y = y + +def f(c: C) -> None: + c.x + c.y +[out] +def C.__init__(self, x, y): + self :: __main__.C + x :: int32 + y :: int64 +L0: + self.x = x + self.y = y + return 1 +def f(c): + c :: __main__.C + r0 :: int32 + r1 :: int64 +L0: + r0 = c.x + r1 = c.y + return 1 + +[case testConditionallyUndefinedI64] +from mypy_extensions import i64 + +def f(x: i64) -> i64: + if x: + y: i64 = 2 + return y +[out] +def f(x): + x, r0, y :: int64 + __locals_bitmap0 :: uint32 + r1 :: bit + r2, r3 :: uint32 + r4 :: bit + r5 :: bool + r6 :: int64 +L0: + r0 = :: int64 + y = r0 + __locals_bitmap0 = 0 + r1 = x != 0 + if r1 goto L1 else goto L2 :: bool +L1: + y = 2 + r2 = __locals_bitmap0 | 1 + __locals_bitmap0 = r2 +L2: + r3 = __locals_bitmap0 & 1 + r4 = r3 == 0 + if r4 goto L3 else goto L5 :: bool +L3: + r5 = raise UnboundLocalError('local variable "y" referenced before assignment') + if not r5 goto L6 (error at f:-1) else goto L4 :: bool +L4: + unreachable +L5: + return y +L6: + r6 = :: int64 + return r6 diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index d8c4333cafad..27e225f273bc 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -3,15 +3,40 @@ from typing import ( TypeVar, Generic, List, Iterator, Iterable, Dict, Optional, Tuple, Any, Set, - overload, Mapping, Union, Callable, Sequence, FrozenSet + overload, Mapping, Union, Callable, Sequence, FrozenSet, Protocol ) T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) S = TypeVar('S') K = TypeVar('K') # for keys in mapping V = TypeVar('V') # for values in mapping +class __SupportsAbs(Protocol[T_co]): + def __abs__(self) -> T_co: pass + +class __SupportsDivMod(Protocol[T_contra, T_co]): + def __divmod__(self, other: T_contra) -> T_co: ... + +class __SupportsRDivMod(Protocol[T_contra, T_co]): + def __rdivmod__(self, other: T_contra) -> T_co: ... 
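The `testExceptionWithOverlappingErrorValue` case above illustrates the calling convention for `i64` returns: every 64-bit pattern is a legal value, so the generated IR reserves a sentinel (`-113` in the test output) and only consults the error indicator (`PyErr_Occurred`) when it sees that value. A rough pure-Python emulation of the idea; the names are illustrative and not mypyc APIs:

    _pending_error = []  # stand-in for the thread's error indicator

    def raising_callee() -> int:
        # A compiled i64 function that raises records the exception and
        # returns the sentinel value to its caller.
        _pending_error.append(ValueError("boom"))
        return -113

    def caller() -> int:
        r0 = raising_callee()
        # Cheap comparison first; the slower "did an exception really occur?"
        # check only runs when the sentinel is seen, mirroring the
        # r0 == -113 / PyErr_Occurred() sequence in the IR above.
        if r0 == -113 and _pending_error:
            raise _pending_error.pop()
        return r0
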
+ +_M = TypeVar("_M", contravariant=True) + +class __SupportsPow2(Protocol[T_contra, T_co]): + def __pow__(self, other: T_contra) -> T_co: ... + +class __SupportsPow3NoneOnly(Protocol[T_contra, T_co]): + def __pow__(self, other: T_contra, modulo: None = ...) -> T_co: ... + +class __SupportsPow3(Protocol[T_contra, _M, T_co]): + def __pow__(self, other: T_contra, modulo: _M) -> T_co: ... + +__SupportsSomeKindOfPow = Union[ + __SupportsPow2[Any, Any], __SupportsPow3NoneOnly[Any, Any] | __SupportsPow3[Any, Any, Any] +] + class object: def __init__(self) -> None: pass def __eq__(self, x: object) -> bool: pass @@ -38,8 +63,10 @@ def __pow__(self, n: int, modulo: Optional[int] = None) -> int: pass def __floordiv__(self, x: int) -> int: pass def __truediv__(self, x: float) -> float: pass def __mod__(self, x: int) -> int: pass + def __divmod__(self, x: float) -> Tuple[float, float]: pass def __neg__(self) -> int: pass def __pos__(self) -> int: pass + def __abs__(self) -> int: pass def __invert__(self) -> int: pass def __and__(self, n: int) -> int: pass def __or__(self, n: int) -> int: pass @@ -87,7 +114,11 @@ def __add__(self, n: float) -> float: pass def __sub__(self, n: float) -> float: pass def __mul__(self, n: float) -> float: pass def __truediv__(self, n: float) -> float: pass + def __pow__(self, n: float) -> float: pass def __neg__(self) -> float: pass + def __pos__(self) -> float: pass + def __abs__(self) -> float: pass + def __invert__(self) -> float: pass class complex: def __init__(self, x: object, y: object = None) -> None: pass @@ -164,6 +195,7 @@ def __rmul__(self, i: int) -> List[T]: pass def __iter__(self) -> Iterator[T]: pass def __len__(self) -> int: pass def __contains__(self, item: object) -> int: ... + def __add__(self, x: List[T]) -> List[T]: ... def append(self, x: T) -> None: pass def pop(self, i: int = -1) -> T: pass def count(self, T) -> int: pass @@ -212,12 +244,14 @@ def clear(self) -> None: pass def pop(self) -> T: pass def update(self, x: Iterable[S]) -> None: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> Set[Union[T, S]]: ... class frozenset(Generic[T]): def __init__(self, i: Optional[Iterable[T]] = None) -> None: pass def __iter__(self) -> Iterator[T]: pass def __len__(self) -> int: pass def __or__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... + def __xor__(self, s: Union[Set[S], FrozenSet[S]]) -> FrozenSet[Union[T, S]]: ... class slice: pass @@ -248,39 +282,26 @@ class Exception(BaseException): def __init__(self, message: Optional[str] = None) -> None: pass class Warning(Exception): pass - class UserWarning(Warning): pass - class TypeError(Exception): pass - class ValueError(Exception): pass - class AttributeError(Exception): pass - class ImportError(Exception): pass - class NameError(Exception): pass - class LookupError(Exception): pass - class KeyError(LookupError): pass - class IndexError(LookupError): pass - class RuntimeError(Exception): pass - class UnicodeEncodeError(RuntimeError): pass - class UnicodeDecodeError(RuntimeError): pass - class NotImplementedError(RuntimeError): pass class StopIteration(Exception): value: Any class ArithmeticError(Exception): pass - -class ZeroDivisionError(Exception): pass +class ZeroDivisionError(ArithmeticError): pass +class OverflowError(ArithmeticError): pass class GeneratorExit(BaseException): pass @@ -308,7 +329,17 @@ def zip(x: Iterable[T], y: Iterable[S]) -> Iterator[Tuple[T, S]]: ... 
@overload def zip(x: Iterable[T], y: Iterable[S], z: Iterable[V]) -> Iterator[Tuple[T, S, V]]: ... def eval(e: str) -> Any: ... -def abs(x: float) -> float: ... +def abs(x: __SupportsAbs[T]) -> T: ... +@overload +def divmod(x: __SupportsDivMod[T_contra, T_co], y: T_contra) -> T_co: ... +@overload +def divmod(x: T_contra, y: __SupportsRDivMod[T_contra, T_co]) -> T_co: ... +@overload +def pow(base: __SupportsPow2[T_contra, T_co], exp: T_contra, mod: None = None) -> T_co: ... +@overload +def pow(base: __SupportsPow3NoneOnly[T_contra, T_co], exp: T_contra, mod: None = None) -> T_co: ... +@overload +def pow(base: __SupportsPow3[T_contra, _M, T_co], exp: T_contra, mod: _M) -> T_co: ... def exit() -> None: ... def min(x: T, y: T) -> T: ... def max(x: T, y: T) -> T: ... diff --git a/mypyc/test-data/fixtures/testutil.py b/mypyc/test-data/fixtures/testutil.py index 0080b1b4f223..7b4fcc9fc1ca 100644 --- a/mypyc/test-data/fixtures/testutil.py +++ b/mypyc/test-data/fixtures/testutil.py @@ -12,7 +12,7 @@ def assertRaises(typ: type, msg: str = '') -> Iterator[None]: try: yield except Exception as e: - assert isinstance(e, typ), f"{e} is not a {typ.__name__}" + assert isinstance(e, typ), f"{e!r} is not a {typ.__name__}" assert msg in str(e), f'Message "{e}" does not match "{msg}"' else: assert False, f"Expected {typ.__name__} but got no exception" diff --git a/mypyc/test-data/irbuild-any.test b/mypyc/test-data/irbuild-any.test index bace026bc957..8d4e085179ae 100644 --- a/mypyc/test-data/irbuild-any.test +++ b/mypyc/test-data/irbuild-any.test @@ -176,3 +176,65 @@ L6: r4 = unbox(int, r3) n = r4 return 1 + +[case testAbsSpecialization] +# Specialization of native classes that implement __abs__ is checked in +# irbuild-dunders.test +def f() -> None: + a = abs(1) + b = abs(1.1) +[out] +def f(): + r0, r1 :: object + r2, a :: int + r3, r4, b :: float +L0: + r0 = object 1 + r1 = PyNumber_Absolute(r0) + r2 = unbox(int, r1) + a = r2 + r3 = 1.1 + r4 = PyNumber_Absolute(r3) + b = r4 + return 1 + +[case testFunctionBasedOps] +def f() -> None: + a = divmod(5, 2) +def f2() -> int: + return pow(2, 5) +def f3() -> float: + return pow(2, 5, 3) +[out] +def f(): + r0, r1, r2 :: object + r3, a :: tuple[float, float] +L0: + r0 = object 5 + r1 = object 2 + r2 = PyNumber_Divmod(r0, r1) + r3 = unbox(tuple[float, float], r2) + a = r3 + return 1 +def f2(): + r0, r1, r2 :: object + r3 :: int +L0: + r0 = object 2 + r1 = object 5 + r2 = CPyNumber_Power(r0, r1) + r3 = unbox(int, r2) + return r3 +def f3(): + r0, r1, r2, r3 :: object + r4 :: int + r5 :: object +L0: + r0 = object 2 + r1 = object 5 + r2 = object 3 + r3 = PyNumber_Power(r0, r1, r2) + r4 = unbox(int, r3) + r5 = box(int, r4) + return r5 + diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index 8e54b25b673b..a06977d037b2 100644 --- a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -1108,7 +1108,9 @@ L0: return 1 [case testCallableTypes] -from typing import Callable +from typing import Callable, Any +from m import f + def absolute_value(x: int) -> int: return x if x > 0 else -x @@ -1116,7 +1118,7 @@ def call_native_function(x: int) -> int: return absolute_value(x) def call_python_function(x: int) -> int: - return int(x) + return f(x) def return_float() -> float: return 5.0 @@ -1127,6 +1129,9 @@ def return_callable_type() -> Callable[[], float]: def call_callable_type() -> float: f = return_callable_type() return f() +[file m.py] +def f(x: int) -> int: + return x [out] def absolute_value(x): x :: int @@ -1158,14 
+1163,18 @@ L0: return r0 def call_python_function(x): x :: int - r0, r1, r2 :: object - r3 :: int + r0 :: dict + r1 :: str + r2, r3, r4 :: object + r5 :: int L0: - r0 = load_address PyLong_Type - r1 = box(int, x) - r2 = PyObject_CallFunctionObjArgs(r0, r1, 0) - r3 = unbox(int, r2) - return r3 + r0 = __main__.globals :: static + r1 = 'f' + r2 = CPyDict_GetItem(r0, r1) + r3 = box(int, x) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + return r5 def return_float(): r0 :: float L0: @@ -2228,243 +2237,6 @@ L0: r1 = CPyTagged_Multiply(4, r0) return r1 -[case testPropertyDerivedGen] -from typing import Callable -class BaseProperty: - @property - def value(self) -> object: - return self._incrementer - - @property - def bad_value(self) -> object: - return self._incrementer - - @property - def next(self) -> BaseProperty: - return BaseProperty(self._incrementer + 1) - - def __init__(self, value: int) -> None: - self._incrementer = value - -class DerivedProperty(BaseProperty): - @property - def value(self) -> int: - return self._incrementer - - @property - def bad_value(self) -> object: - return self._incrementer - - @property - def next(self) -> DerivedProperty: - return DerivedProperty(self._incr_func, self._incr_func(self.value)) - - def __init__(self, incr_func: Callable[[int], int], value: int) -> None: - BaseProperty.__init__(self, value) - self._incr_func = incr_func - - -class AgainProperty(DerivedProperty): - @property - def next(self) -> AgainProperty: - return AgainProperty(self._incr_func, self._incr_func(self._incr_func(self.value))) - - @property - def bad_value(self) -> int: - return self._incrementer -[out] -def BaseProperty.value(self): - self :: __main__.BaseProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def BaseProperty.bad_value(self): - self :: __main__.BaseProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def BaseProperty.next(self): - self :: __main__.BaseProperty - r0, r1 :: int - r2 :: __main__.BaseProperty -L0: - r0 = borrow self._incrementer - r1 = CPyTagged_Add(r0, 2) - keep_alive self - r2 = BaseProperty(r1) - return r2 -def BaseProperty.__init__(self, value): - self :: __main__.BaseProperty - value :: int -L0: - self._incrementer = value - return 1 -def DerivedProperty.value(self): - self :: __main__.DerivedProperty - r0 :: int -L0: - r0 = self._incrementer - return r0 -def DerivedProperty.value__BaseProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.DerivedProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.value - r1 = box(int, r0) - return r1 -def DerivedProperty.bad_value(self): - self :: __main__.DerivedProperty - r0 :: int - r1 :: object -L0: - r0 = self._incrementer - r1 = box(int, r0) - return r1 -def DerivedProperty.next(self): - self :: __main__.DerivedProperty - r0 :: object - r1 :: int - r2, r3, r4 :: object - r5 :: int - r6 :: __main__.DerivedProperty -L0: - r0 = self._incr_func - r1 = self.value - r2 = self._incr_func - r3 = box(int, r1) - r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) - r5 = unbox(int, r4) - r6 = DerivedProperty(r0, r5) - return r6 -def DerivedProperty.next__BaseProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.DerivedProperty -L0: - r0 = __mypyc_self__.next - return r0 -def DerivedProperty.__init__(self, incr_func, value): - self :: __main__.DerivedProperty - incr_func :: object - value :: int - r0 :: None -L0: - r0 = BaseProperty.__init__(self, value) - self._incr_func = incr_func - 
return 1 -def AgainProperty.next(self): - self :: __main__.AgainProperty - r0 :: object - r1 :: int - r2, r3, r4 :: object - r5 :: int - r6, r7, r8 :: object - r9 :: int - r10 :: __main__.AgainProperty -L0: - r0 = self._incr_func - r1 = self.value - r2 = self._incr_func - r3 = box(int, r1) - r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) - r5 = unbox(int, r4) - r6 = self._incr_func - r7 = box(int, r5) - r8 = PyObject_CallFunctionObjArgs(r6, r7, 0) - r9 = unbox(int, r8) - r10 = AgainProperty(r0, r9) - return r10 -def AgainProperty.next__DerivedProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.AgainProperty -L0: - r0 = __mypyc_self__.next - return r0 -def AgainProperty.next__BaseProperty_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.AgainProperty -L0: - r0 = __mypyc_self__.next - return r0 -def AgainProperty.bad_value(self): - self :: __main__.AgainProperty - r0 :: int -L0: - r0 = self._incrementer - return r0 -def AgainProperty.bad_value__DerivedProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.AgainProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.bad_value - r1 = box(int, r0) - return r1 -def AgainProperty.bad_value__BaseProperty_glue(__mypyc_self__): - __mypyc_self__ :: __main__.AgainProperty - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.bad_value - r1 = box(int, r0) - return r1 - -[case testPropertyTraitSubclassing] -from mypy_extensions import trait -@trait -class SubclassedTrait: - @property - def this(self) -> SubclassedTrait: - return self - - @property - def boxed(self) -> object: - return 3 - -class DerivingObject(SubclassedTrait): - @property - def this(self) -> DerivingObject: - return self - - @property - def boxed(self) -> int: - return 5 -[out] -def SubclassedTrait.this(self): - self :: __main__.SubclassedTrait -L0: - return self -def SubclassedTrait.boxed(self): - self :: __main__.SubclassedTrait - r0 :: object -L0: - r0 = object 3 - return r0 -def DerivingObject.this(self): - self :: __main__.DerivingObject -L0: - return self -def DerivingObject.this__SubclassedTrait_glue(__mypyc_self__): - __mypyc_self__, r0 :: __main__.DerivingObject -L0: - r0 = __mypyc_self__.this - return r0 -def DerivingObject.boxed(self): - self :: __main__.DerivingObject -L0: - return 10 -def DerivingObject.boxed__SubclassedTrait_glue(__mypyc_self__): - __mypyc_self__ :: __main__.DerivingObject - r0 :: int - r1 :: object -L0: - r0 = __mypyc_self__.boxed - r1 = box(int, r0) - return r1 - [case testNativeIndex] from typing import List class A: @@ -2575,11 +2347,8 @@ def __top_level__(): r92, r93, r94, r95 :: ptr r96 :: dict r97 :: str - r98, r99 :: object - r100 :: dict - r101 :: str - r102 :: int32 - r103 :: bit + r98 :: int32 + r99 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2691,13 +2460,9 @@ L2: set_mem r95, r91 :: builtins.object* keep_alive r88 r96 = __main__.globals :: static - r97 = 'Bar' - r98 = CPyDict_GetItem(r96, r97) - r99 = PyObject_CallFunctionObjArgs(r98, r88, 0) - r100 = __main__.globals :: static - r101 = 'y' - r102 = CPyDict_SetItem(r100, r101, r99) - r103 = r102 >= 0 :: signed + r97 = 'y' + r98 = CPyDict_SetItem(r96, r97, r88) + r99 = r98 >= 0 :: signed return 1 [case testChainedConditional] @@ -3312,8 +3077,7 @@ def call_sum(l, comparison): r1, r2 :: object r3, x :: int r4, r5 :: object - r6 :: bool - r7 :: object + r6, r7 :: bool r8, r9 :: int r10 :: bit L0: @@ -3328,8 +3092,8 @@ L2: r4 = box(int, x) r5 = PyObject_CallFunctionObjArgs(comparison, r4, 0) r6 = unbox(bool, r5) - r7 = box(bool, r6) - r8 = 
unbox(int, r7) + r7 = r6 << 1 + r8 = extend r7: builtins.bool to builtins.int r9 = CPyTagged_Add(r0, r8) r0 = r9 L3: @@ -3510,7 +3274,7 @@ L2: [case testFinalStaticInt] from typing import Final -x: Final = 1 + 1 +x: Final = 1 + int() def f() -> int: return x - 1 @@ -3821,3 +3585,18 @@ L0: r3 = 0.0 i__redef____redef__ = r3 return 1 + +[case testNewType] +from typing import NewType + +class A: pass + +N = NewType("N", A) + +def f(arg: A) -> N: + return N(arg) +[out] +def f(arg): + arg :: __main__.A +L0: + return arg diff --git a/mypyc/test-data/irbuild-bool.test b/mypyc/test-data/irbuild-bool.test new file mode 100644 index 000000000000..9257d8d63f7e --- /dev/null +++ b/mypyc/test-data/irbuild-bool.test @@ -0,0 +1,463 @@ +[case testBoolToAndFromInt] +from mypy_extensions import i64 + +def bool_to_int(b: bool) -> int: + return b +def int_to_bool(n: int) -> bool: + return bool(n) +def bool_to_i64(b: bool) -> i64: + return b +def i64_to_bool(n: i64) -> bool: + return bool(n) +def bit_to_int(n1: i64, n2: i64) -> int: + return bool(n1 == n2) +def bit_to_i64(n1: i64, n2: i64) -> i64: + return bool(n1 == n2) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_bool(n): + n :: int + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def i64_to_bool(n): + n :: int64 + r0 :: bit +L0: + r0 = n != 0 + return r0 +def bit_to_int(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: bool + r2 :: int +L0: + r0 = n1 == n2 + r1 = r0 << 1 + r2 = extend r1: builtins.bool to builtins.int + return r2 +def bit_to_i64(n1, n2): + n1, n2 :: int64 + r0 :: bit + r1 :: int64 +L0: + r0 = n1 == n2 + r1 = extend r0: bit to int64 + return r1 + +[case testConversionToBool] +from typing import List, Optional + +class C: pass +class D: + def __bool__(self) -> bool: + return True + +def list_to_bool(l: List[str]) -> bool: + return bool(l) + +def always_truthy_instance_to_bool(o: C) -> bool: + return bool(o) + +def instance_to_bool(o: D) -> bool: + return bool(o) + +def optional_truthy_to_bool(o: Optional[C]) -> bool: + return bool(o) + +def optional_maybe_falsey_to_bool(o: Optional[D]) -> bool: + return bool(o) +[out] +def D.__bool__(self): + self :: __main__.D +L0: + return 1 +def list_to_bool(l): + l :: list + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: bit +L0: + r0 = get_element_ptr l ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive l + r2 = r1 << 1 + r3 = r2 != 0 + return r3 +def always_truthy_instance_to_bool(o): + o :: __main__.C + r0 :: int32 + r1 :: bit + r2 :: bool +L0: + r0 = PyObject_IsTrue(o) + r1 = r0 >= 0 :: signed + r2 = truncate r0: int32 to builtins.bool + return r2 +def instance_to_bool(o): + o :: __main__.D + r0 :: bool +L0: + r0 = o.__bool__() + return r0 +def optional_truthy_to_bool(o): + o :: union[__main__.C, None] + r0 :: object + r1 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + return r1 +def optional_maybe_falsey_to_bool(o): + o :: union[__main__.D, None] + r0 :: object + r1 :: bit + r2 :: __main__.D + r3 :: bool + r4 :: bit +L0: + r0 = load_address _Py_NoneStruct + r1 = o != r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = cast(__main__.D, o) + r3 = r2.__bool__() + r4 = r3 + goto L3 +L2: + r4 = 0 +L3: + return r4 + +[case testBoolComparisons] +def eq(x: bool, y: bool) -> bool: + return x == y + +def neq(x: bool, y: bool) -> bool: + return x != y + +def lt(x: bool, y: 
bool) -> bool: + return x < y + +def le(x: bool, y: bool) -> bool: + return x <= y + +def gt(x: bool, y: bool) -> bool: + return x > y + +def ge(x: bool, y: bool) -> bool: + return x >= y +[out] +def eq(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x == y + return r0 +def neq(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x != y + return r0 +def lt(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x < y :: signed + return r0 +def le(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x <= y :: signed + return r0 +def gt(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x > y :: signed + return r0 +def ge(x, y): + x, y :: bool + r0 :: bit +L0: + r0 = x >= y :: signed + return r0 + +[case testBoolMixedComparisons1] +from mypy_extensions import i64 + +def eq1(x: int, y: bool) -> bool: + return x == y + +def eq2(x: bool, y: int) -> bool: + return x == y + +def neq1(x: i64, y: bool) -> bool: + return x != y + +def neq2(x: bool, y: i64) -> bool: + return x != y +[out] +def eq1(x, y): + x :: int + y, r0 :: bool + r1 :: int + r2 :: bit +L0: + r0 = y << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = x == r1 + return r2 +def eq2(x, y): + x :: bool + y :: int + r0 :: bool + r1 :: int + r2 :: bit +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = r1 == y + return r2 +def neq1(x, y): + x :: int64 + y :: bool + r0 :: int64 + r1 :: bit +L0: + r0 = extend y: builtins.bool to int64 + r1 = x != r0 + return r1 +def neq2(x, y): + x :: bool + y, r0 :: int64 + r1 :: bit +L0: + r0 = extend x: builtins.bool to int64 + r1 = r0 != y + return r1 + +[case testBoolMixedComparisons2] +from mypy_extensions import i64 + +def lt1(x: bool, y: int) -> bool: + return x < y + +def lt2(x: int, y: bool) -> bool: + return x < y + +def gt1(x: bool, y: i64) -> bool: + return x < y + +def gt2(x: i64, y: bool) -> bool: + return x < y +[out] +def lt1(x, y): + x :: bool + y :: int + r0 :: bool + r1 :: short_int + r2 :: native_int + r3 :: bit + r4 :: native_int + r5, r6, r7 :: bit + r8 :: bool + r9 :: bit +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to short_int + r2 = r1 & 1 + r3 = r2 == 0 + r4 = y & 1 + r5 = r4 == 0 + r6 = r3 & r5 + if r6 goto L1 else goto L2 :: bool +L1: + r7 = r1 < y :: signed + r8 = r7 + goto L3 +L2: + r9 = CPyTagged_IsLt_(r1, y) + r8 = r9 +L3: + return r8 +def lt2(x, y): + x :: int + y, r0 :: bool + r1 :: short_int + r2 :: native_int + r3 :: bit + r4 :: native_int + r5, r6, r7 :: bit + r8 :: bool + r9 :: bit +L0: + r0 = y << 1 + r1 = extend r0: builtins.bool to short_int + r2 = x & 1 + r3 = r2 == 0 + r4 = r1 & 1 + r5 = r4 == 0 + r6 = r3 & r5 + if r6 goto L1 else goto L2 :: bool +L1: + r7 = x < r1 :: signed + r8 = r7 + goto L3 +L2: + r9 = CPyTagged_IsLt_(x, r1) + r8 = r9 +L3: + return r8 +def gt1(x, y): + x :: bool + y, r0 :: int64 + r1 :: bit +L0: + r0 = extend x: builtins.bool to int64 + r1 = r0 < y :: signed + return r1 +def gt2(x, y): + x :: int64 + y :: bool + r0 :: int64 + r1 :: bit +L0: + r0 = extend y: builtins.bool to int64 + r1 = x < r0 :: signed + return r1 + +[case testBoolBitwise] +from mypy_extensions import i64 +def bitand(x: bool, y: bool) -> bool: + b = x & y + return b +def bitor(x: bool, y: bool) -> bool: + b = x | y + return b +def bitxor(x: bool, y: bool) -> bool: + b = x ^ y + return b +def invert(x: bool) -> int: + return ~x +def mixed_bitand(x: i64, y: bool) -> i64: + return x & y +[out] +def bitand(x, y): + x, y, r0, b :: bool +L0: + r0 = x & y + b = r0 + return b +def bitor(x, y): + x, y, r0, b :: bool +L0: + r0 = x | y + b = r0 + return b +def bitxor(x, y): + x, 
y, r0, b :: bool +L0: + r0 = x ^ y + b = r0 + return b +def invert(x): + x, r0 :: bool + r1, r2 :: int +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Invert(r1) + return r2 +def mixed_bitand(x, y): + x :: int64 + y :: bool + r0, r1 :: int64 +L0: + r0 = extend y: builtins.bool to int64 + r1 = x & r0 + return r1 + +[case testBoolArithmetic] +def add(x: bool, y: bool) -> int: + z = x + y + return z +def mixed(b: bool, n: int) -> int: + z = b + n + z -= b + z = z * b + return z +def negate(b: bool) -> int: + return -b +def unary_plus(b: bool) -> int: + x = +b + return x +[out] +def add(x, y): + x, y, r0 :: bool + r1 :: int + r2 :: bool + r3, r4, z :: int +L0: + r0 = x << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = y << 1 + r3 = extend r2: builtins.bool to builtins.int + r4 = CPyTagged_Add(r1, r3) + z = r4 + return z +def mixed(b, n): + b :: bool + n :: int + r0 :: bool + r1, r2, z :: int + r3 :: bool + r4, r5 :: int + r6 :: bool + r7, r8 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Add(r1, n) + z = r2 + r3 = b << 1 + r4 = extend r3: builtins.bool to builtins.int + r5 = CPyTagged_Subtract(z, r4) + z = r5 + r6 = b << 1 + r7 = extend r6: builtins.bool to builtins.int + r8 = CPyTagged_Multiply(z, r7) + z = r8 + return z +def negate(b): + b, r0 :: bool + r1, r2 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + r2 = CPyTagged_Negate(r1) + return r2 +def unary_plus(b): + b, r0 :: bool + r1, x :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + x = r1 + return x diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index 5a574ac44354..b9501c32180d 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -181,94 +181,6 @@ L0: o.x = r1; r2 = is_error return o -[case testSubclassSpecialize2] -class A: - def foo(self, x: int) -> object: - return str(x) -class B(A): - def foo(self, x: object) -> object: - return x -class C(B): - def foo(self, x: object) -> int: - return id(x) - -def use_a(x: A, y: int) -> object: - return x.foo(y) - -def use_b(x: B, y: object) -> object: - return x.foo(y) - -def use_c(x: C, y: object) -> int: - return x.foo(y) -[out] -def A.foo(self, x): - self :: __main__.A - x :: int - r0 :: str -L0: - r0 = CPyTagged_Str(x) - return r0 -def B.foo(self, x): - self :: __main__.B - x :: object -L0: - return x -def B.foo__A_glue(self, x): - self :: __main__.B - x :: int - r0, r1 :: object -L0: - r0 = box(int, x) - r1 = B.foo(self, r0) - return r1 -def C.foo(self, x): - self :: __main__.C - x :: object - r0 :: int -L0: - r0 = CPyTagged_Id(x) - return r0 -def C.foo__B_glue(self, x): - self :: __main__.C - x :: object - r0 :: int - r1 :: object -L0: - r0 = C.foo(self, x) - r1 = box(int, r0) - return r1 -def C.foo__A_glue(self, x): - self :: __main__.C - x :: int - r0 :: object - r1 :: int - r2 :: object -L0: - r0 = box(int, x) - r1 = C.foo(self, r0) - r2 = box(int, r1) - return r2 -def use_a(x, y): - x :: __main__.A - y :: int - r0 :: object -L0: - r0 = x.foo(y) - return r0 -def use_b(x, y): - x :: __main__.B - y, r0 :: object -L0: - r0 = x.foo(y) - return r0 -def use_c(x, y): - x :: __main__.C - y :: object - r0 :: int -L0: - r0 = x.foo(y) - return r0 - [case testSubclass_toplevel] from typing import TypeVar, Generic from mypy_extensions import trait @@ -1333,3 +1245,45 @@ L2: y = 4 L3: return 1 + +[case testIncompatibleDefinitionOfAttributeInSubclass] +from mypy_extensions 
import trait + +class Base: + x: int + +class Bad1(Base): + x: bool # E: Type of "x" is incompatible with definition in class "Base" + +class Good1(Base): + x: int + +class Good2(Base): + x: int = 0 + +class Good3(Base): + x = 0 + +class Good4(Base): + def __init__(self) -> None: + self.x = 0 + +class Good5(Base): + def __init__(self) -> None: + self.x: int = 0 + +class Base2(Base): + pass + +class Bad2(Base2): + x: bool = False # E: Type of "x" is incompatible with definition in class "Base" + +class Bad3(Base): + x = False # E: Type of "x" is incompatible with definition in class "Base" + +@trait +class T: + y: object + +class E(T): + y: str # E: Type of "y" is incompatible with definition in trait "T" diff --git a/mypyc/test-data/irbuild-constant-fold.test b/mypyc/test-data/irbuild-constant-fold.test index dd75c01443f1..7d9127887aa6 100644 --- a/mypyc/test-data/irbuild-constant-fold.test +++ b/mypyc/test-data/irbuild-constant-fold.test @@ -205,23 +205,13 @@ Y: Final = 2 + 4 def f() -> None: a = X + 1 - # TODO: Constant fold this as well a = Y + 1 [out] def f(): - a, r0 :: int - r1 :: bool - r2 :: int + a :: int L0: a = 12 - r0 = __main__.Y :: static - if is_error(r0) goto L1 else goto L2 -L1: - r1 = raise NameError('value for final name "Y" was not set') - unreachable -L2: - r2 = CPyTagged_Add(r0, 2) - a = r2 + a = 14 return 1 [case testIntConstantFoldingClassFinal] diff --git a/mypyc/test-data/irbuild-dict.test b/mypyc/test-data/irbuild-dict.test index 3e2c295637ab..99643b9451f0 100644 --- a/mypyc/test-data/irbuild-dict.test +++ b/mypyc/test-data/irbuild-dict.test @@ -218,13 +218,17 @@ L0: return r2 [case testDictIterationMethods] -from typing import Dict +from typing import Dict, Union def print_dict_methods(d1: Dict[int, int], d2: Dict[int, int]) -> None: for v in d1.values(): if v in d2: return for k, v in d2.items(): d2[k] += v +def union_of_dicts(d: Union[Dict[str, int], Dict[str, str]]) -> None: + new = {} + for k, v in d.items(): + new[k] = int(v) [out] def print_dict_methods(d1, d2): d1, d2 :: dict @@ -314,6 +318,58 @@ L11: r34 = CPy_NoErrOccured() L12: return 1 +def union_of_dicts(d): + d, r0, new :: dict + r1 :: short_int + r2 :: native_int + r3 :: short_int + r4 :: object + r5 :: tuple[bool, short_int, object, object] + r6 :: short_int + r7 :: bool + r8, r9 :: object + r10 :: str + r11 :: union[int, str] + k :: str + v :: union[int, str] + r12, r13 :: object + r14 :: int + r15 :: object + r16 :: int32 + r17, r18, r19 :: bit +L0: + r0 = PyDict_New() + new = r0 + r1 = 0 + r2 = PyDict_Size(d) + r3 = r2 << 1 + r4 = CPyDict_GetItemsIter(d) +L1: + r5 = CPyDict_NextItem(r4, r1) + r6 = r5[1] + r1 = r6 + r7 = r5[0] + if r7 goto L2 else goto L4 :: bool +L2: + r8 = r5[2] + r9 = r5[3] + r10 = cast(str, r8) + r11 = cast(union[int, str], r9) + k = r10 + v = r11 + r12 = load_address PyLong_Type + r13 = PyObject_CallFunctionObjArgs(r12, v, 0) + r14 = unbox(int, r13) + r15 = box(int, r14) + r16 = CPyDict_SetItem(new, k, r15) + r17 = r16 >= 0 :: signed +L3: + r18 = CPyDict_CheckSize(d, r3) + goto L1 +L4: + r19 = CPy_NoErrOccured() +L5: + return 1 [case testDictLoadAddress] def f() -> None: diff --git a/mypyc/test-data/irbuild-dunders.test b/mypyc/test-data/irbuild-dunders.test index d06a570aa7b0..82f04dcdf687 100644 --- a/mypyc/test-data/irbuild-dunders.test +++ b/mypyc/test-data/irbuild-dunders.test @@ -148,11 +148,27 @@ class C: def __float__(self) -> float: return 4.0 + def __pos__(self) -> int: + return 5 + + def __abs__(self) -> int: + return 6 + + def __bool__(self) -> bool: + return 
False + + def __complex__(self) -> complex: + return 7j + def f(c: C) -> None: -c ~c int(c) float(c) + +c + abs(c) + bool(c) + complex(c) [out] def C.__neg__(self): self :: __main__.C @@ -172,16 +188,39 @@ def C.__float__(self): L0: r0 = 4.0 return r0 +def C.__pos__(self): + self :: __main__.C +L0: + return 10 +def C.__abs__(self): + self :: __main__.C +L0: + return 12 +def C.__bool__(self): + self :: __main__.C +L0: + return 0 +def C.__complex__(self): + self :: __main__.C + r0 :: object +L0: + r0 = 7j + return r0 def f(c): c :: __main__.C - r0, r1 :: int - r2, r3, r4, r5 :: object + r0, r1, r2 :: int + r3 :: float + r4, r5 :: int + r6 :: bool + r7 :: object L0: r0 = c.__neg__() r1 = c.__invert__() - r2 = load_address PyLong_Type - r3 = PyObject_CallFunctionObjArgs(r2, c, 0) - r4 = load_address PyFloat_Type - r5 = PyObject_CallFunctionObjArgs(r4, c, 0) + r2 = c.__int__() + r3 = c.__float__() + r4 = c.__pos__() + r5 = c.__abs__() + r6 = c.__bool__() + r7 = c.__complex__() return 1 diff --git a/mypyc/test-data/irbuild-glue-methods.test b/mypyc/test-data/irbuild-glue-methods.test new file mode 100644 index 000000000000..6d749bf5dd84 --- /dev/null +++ b/mypyc/test-data/irbuild-glue-methods.test @@ -0,0 +1,437 @@ +# Test cases for glue methods. +# +# These are used when subclass method signature has a different representation +# compared to the base class. + +[case testSubclassSpecialize2] +class A: + def foo(self, x: int) -> object: + return str(x) +class B(A): + def foo(self, x: object) -> object: + return x +class C(B): + def foo(self, x: object) -> int: + return id(x) + +def use_a(x: A, y: int) -> object: + return x.foo(y) + +def use_b(x: B, y: object) -> object: + return x.foo(y) + +def use_c(x: C, y: object) -> int: + return x.foo(y) +[out] +def A.foo(self, x): + self :: __main__.A + x :: int + r0 :: str +L0: + r0 = CPyTagged_Str(x) + return r0 +def B.foo(self, x): + self :: __main__.B + x :: object +L0: + return x +def B.foo__A_glue(self, x): + self :: __main__.B + x :: int + r0, r1 :: object +L0: + r0 = box(int, x) + r1 = B.foo(self, r0) + return r1 +def C.foo(self, x): + self :: __main__.C + x :: object + r0 :: int +L0: + r0 = CPyTagged_Id(x) + return r0 +def C.foo__B_glue(self, x): + self :: __main__.C + x :: object + r0 :: int + r1 :: object +L0: + r0 = C.foo(self, x) + r1 = box(int, r0) + return r1 +def C.foo__A_glue(self, x): + self :: __main__.C + x :: int + r0 :: object + r1 :: int + r2 :: object +L0: + r0 = box(int, x) + r1 = C.foo(self, r0) + r2 = box(int, r1) + return r2 +def use_a(x, y): + x :: __main__.A + y :: int + r0 :: object +L0: + r0 = x.foo(y) + return r0 +def use_b(x, y): + x :: __main__.B + y, r0 :: object +L0: + r0 = x.foo(y) + return r0 +def use_c(x, y): + x :: __main__.C + y :: object + r0 :: int +L0: + r0 = x.foo(y) + return r0 + +[case testPropertyDerivedGen] +from typing import Callable +class BaseProperty: + @property + def value(self) -> object: + return self._incrementer + + @property + def bad_value(self) -> object: + return self._incrementer + + @property + def next(self) -> BaseProperty: + return BaseProperty(self._incrementer + 1) + + def __init__(self, value: int) -> None: + self._incrementer = value + +class DerivedProperty(BaseProperty): + @property + def value(self) -> int: + return self._incrementer + + @property + def bad_value(self) -> object: + return self._incrementer + + @property + def next(self) -> DerivedProperty: + return DerivedProperty(self._incr_func, self._incr_func(self.value)) + + def __init__(self, incr_func: Callable[[int], int], 
value: int) -> None: + BaseProperty.__init__(self, value) + self._incr_func = incr_func + + +class AgainProperty(DerivedProperty): + @property + def next(self) -> AgainProperty: + return AgainProperty(self._incr_func, self._incr_func(self._incr_func(self.value))) + + @property + def bad_value(self) -> int: + return self._incrementer +[out] +def BaseProperty.value(self): + self :: __main__.BaseProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def BaseProperty.bad_value(self): + self :: __main__.BaseProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def BaseProperty.next(self): + self :: __main__.BaseProperty + r0, r1 :: int + r2 :: __main__.BaseProperty +L0: + r0 = borrow self._incrementer + r1 = CPyTagged_Add(r0, 2) + keep_alive self + r2 = BaseProperty(r1) + return r2 +def BaseProperty.__init__(self, value): + self :: __main__.BaseProperty + value :: int +L0: + self._incrementer = value + return 1 +def DerivedProperty.value(self): + self :: __main__.DerivedProperty + r0 :: int +L0: + r0 = self._incrementer + return r0 +def DerivedProperty.value__BaseProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.DerivedProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.value + r1 = box(int, r0) + return r1 +def DerivedProperty.bad_value(self): + self :: __main__.DerivedProperty + r0 :: int + r1 :: object +L0: + r0 = self._incrementer + r1 = box(int, r0) + return r1 +def DerivedProperty.next(self): + self :: __main__.DerivedProperty + r0 :: object + r1 :: int + r2, r3, r4 :: object + r5 :: int + r6 :: __main__.DerivedProperty +L0: + r0 = self._incr_func + r1 = self.value + r2 = self._incr_func + r3 = box(int, r1) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + r6 = DerivedProperty(r0, r5) + return r6 +def DerivedProperty.next__BaseProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.DerivedProperty +L0: + r0 = __mypyc_self__.next + return r0 +def DerivedProperty.__init__(self, incr_func, value): + self :: __main__.DerivedProperty + incr_func :: object + value :: int + r0 :: None +L0: + r0 = BaseProperty.__init__(self, value) + self._incr_func = incr_func + return 1 +def AgainProperty.next(self): + self :: __main__.AgainProperty + r0 :: object + r1 :: int + r2, r3, r4 :: object + r5 :: int + r6, r7, r8 :: object + r9 :: int + r10 :: __main__.AgainProperty +L0: + r0 = self._incr_func + r1 = self.value + r2 = self._incr_func + r3 = box(int, r1) + r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) + r5 = unbox(int, r4) + r6 = self._incr_func + r7 = box(int, r5) + r8 = PyObject_CallFunctionObjArgs(r6, r7, 0) + r9 = unbox(int, r8) + r10 = AgainProperty(r0, r9) + return r10 +def AgainProperty.next__DerivedProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.AgainProperty +L0: + r0 = __mypyc_self__.next + return r0 +def AgainProperty.next__BaseProperty_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.AgainProperty +L0: + r0 = __mypyc_self__.next + return r0 +def AgainProperty.bad_value(self): + self :: __main__.AgainProperty + r0 :: int +L0: + r0 = self._incrementer + return r0 +def AgainProperty.bad_value__DerivedProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.AgainProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.bad_value + r1 = box(int, r0) + return r1 +def AgainProperty.bad_value__BaseProperty_glue(__mypyc_self__): + __mypyc_self__ :: __main__.AgainProperty + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.bad_value 
+ r1 = box(int, r0) + return r1 + +[case testPropertyTraitSubclassing] +from mypy_extensions import trait +@trait +class SubclassedTrait: + @property + def this(self) -> SubclassedTrait: + return self + + @property + def boxed(self) -> object: + return 3 + +class DerivingObject(SubclassedTrait): + @property + def this(self) -> DerivingObject: + return self + + @property + def boxed(self) -> int: + return 5 +[out] +def SubclassedTrait.this(self): + self :: __main__.SubclassedTrait +L0: + return self +def SubclassedTrait.boxed(self): + self :: __main__.SubclassedTrait + r0 :: object +L0: + r0 = object 3 + return r0 +def DerivingObject.this(self): + self :: __main__.DerivingObject +L0: + return self +def DerivingObject.this__SubclassedTrait_glue(__mypyc_self__): + __mypyc_self__, r0 :: __main__.DerivingObject +L0: + r0 = __mypyc_self__.this + return r0 +def DerivingObject.boxed(self): + self :: __main__.DerivingObject +L0: + return 10 +def DerivingObject.boxed__SubclassedTrait_glue(__mypyc_self__): + __mypyc_self__ :: __main__.DerivingObject + r0 :: int + r1 :: object +L0: + r0 = __mypyc_self__.boxed + r1 = box(int, r0) + return r1 + +[case testI64GlueWithExtraDefaultArg] +from mypy_extensions import i64 + +class C: + def f(self) -> None: pass + +class D(C): + def f(self, x: i64 = 44) -> None: pass +[out] +def C.f(self): + self :: __main__.C +L0: + return 1 +def D.f(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 44 +L2: + return 1 +def D.f__C_glue(self): + self :: __main__.D + r0 :: None +L0: + r0 = D.f(self, 0, 0) + return r0 + +[case testI64GlueWithSecondDefaultArg] +from mypy_extensions import i64 + +class C: + def f(self, x: i64 = 11) -> None: pass +class D(C): + def f(self, x: i64 = 12, y: i64 = 13) -> None: pass +[out] +def C.f(self, x, __bitmap): + self :: __main__.C + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 11 +L2: + return 1 +def D.f(self, x, y, __bitmap): + self :: __main__.D + x, y :: int64 + __bitmap, r0 :: uint32 + r1 :: bit + r2 :: uint32 + r3 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 12 +L2: + r2 = __bitmap & 2 + r3 = r2 == 0 + if r3 goto L3 else goto L4 :: bool +L3: + y = 13 +L4: + return 1 +def D.f__C_glue(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap :: uint32 + r0 :: None +L0: + r0 = D.f(self, x, 0, __bitmap) + return r0 + +[case testI64GlueWithInvalidOverride] +from mypy_extensions import i64 + +class C: + def f(self, x: i64, y: i64 = 5) -> None: pass + def ff(self, x: int) -> None: pass +class CC(C): + def f(self, x: i64 = 12, y: i64 = 5) -> None: pass # Line 7 + def ff(self, x: int = 12) -> None: pass + +class D: + def f(self, x: int) -> None: pass +class DD(D): + def f(self, x: i64) -> None: pass # Line 13 + +class E: + def f(self, x: i64) -> None: pass +class EE(E): + def f(self, x: int) -> None: pass # Line 18 +[out] +main:7: error: An argument with type "int64" cannot be given a default value in a method override +main:13: error: Incompatible argument type "int64" (base class has type "int") +main:18: error: Incompatible argument type "int" (base class has type "int64") diff --git a/mypyc/test-data/irbuild-i32.test b/mypyc/test-data/irbuild-i32.test new file mode 100644 index 000000000000..7ea3c0864728 --- /dev/null +++ b/mypyc/test-data/irbuild-i32.test @@ -0,0 +1,534 @@ 
+# Test cases for i32 native ints. Focus on things that are different from i64; no need to +# duplicate all i64 test cases here. + +[case testI32BinaryOp] +from mypy_extensions import i32 + +def add_op(x: i32, y: i32) -> i32: + x = y + x + y = x + 5 + y += x + y += 7 + x = 5 + y + return x +def compare(x: i32, y: i32) -> None: + a = x == y + b = x == -5 + c = x < y + d = x < -5 + e = -5 == x + f = -5 < x +[out] +def add_op(x, y): + x, y, r0, r1, r2, r3, r4 :: int32 +L0: + r0 = y + x + x = r0 + r1 = x + 5 + y = r1 + r2 = y + x + y = r2 + r3 = y + 7 + y = r3 + r4 = 5 + y + x = r4 + return x +def compare(x, y): + x, y :: int32 + r0 :: bit + a :: bool + r1 :: bit + b :: bool + r2 :: bit + c :: bool + r3 :: bit + d :: bool + r4 :: bit + e :: bool + r5 :: bit + f :: bool +L0: + r0 = x == y + a = r0 + r1 = x == -5 + b = r1 + r2 = x < y :: signed + c = r2 + r3 = x < -5 :: signed + d = r3 + r4 = -5 == x + e = r4 + r5 = -5 < x :: signed + f = r5 + return 1 + +[case testI32UnaryOp] +from mypy_extensions import i32 + +def unary(x: i32) -> i32: + y = -x + x = ~y + y = +x + return y +[out] +def unary(x): + x, r0, y, r1 :: int32 +L0: + r0 = 0 - x + y = r0 + r1 = y ^ -1 + x = r1 + y = x + return y + +[case testI32DivisionByConstant] +from mypy_extensions import i32 + +def div_by_constant(x: i32) -> i32: + x = x // 5 + x //= 17 + return x +[out] +def div_by_constant(x): + x, r0, r1 :: int32 + r2, r3, r4 :: bit + r5 :: int32 + r6 :: bit + r7, r8, r9 :: int32 + r10, r11, r12 :: bit + r13 :: int32 + r14 :: bit + r15 :: int32 +L0: + r0 = x / 5 + r1 = r0 + r2 = x < 0 :: signed + r3 = 5 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + r5 = r1 * 5 + r6 = r5 == x + if r6 goto L3 else goto L2 :: bool +L2: + r7 = r1 - 1 + r1 = r7 +L3: + x = r1 + r8 = x / 17 + r9 = r8 + r10 = x < 0 :: signed + r11 = 17 < 0 :: signed + r12 = r10 == r11 + if r12 goto L6 else goto L4 :: bool +L4: + r13 = r9 * 17 + r14 = r13 == x + if r14 goto L6 else goto L5 :: bool +L5: + r15 = r9 - 1 + r9 = r15 +L6: + x = r9 + return x + +[case testI32ModByConstant] +from mypy_extensions import i32 + +def mod_by_constant(x: i32) -> i32: + x = x % 5 + x %= 17 + return x +[out] +def mod_by_constant(x): + x, r0, r1 :: int32 + r2, r3, r4, r5 :: bit + r6, r7, r8 :: int32 + r9, r10, r11, r12 :: bit + r13 :: int32 +L0: + r0 = x % 5 + r1 = r0 + r2 = x < 0 :: signed + r3 = 5 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + r5 = r1 == 0 + if r5 goto L3 else goto L2 :: bool +L2: + r6 = r1 + 5 + r1 = r6 +L3: + x = r1 + r7 = x % 17 + r8 = r7 + r9 = x < 0 :: signed + r10 = 17 < 0 :: signed + r11 = r9 == r10 + if r11 goto L6 else goto L4 :: bool +L4: + r12 = r8 == 0 + if r12 goto L6 else goto L5 :: bool +L5: + r13 = r8 + 17 + r8 = r13 +L6: + x = r8 + return x + +[case testI32DivModByVariable] +from mypy_extensions import i32 + +def divmod(x: i32, y: i32) -> i32: + a = x // y + return a % y +[out] +def divmod(x, y): + x, y, r0, a, r1 :: int32 +L0: + r0 = CPyInt32_Divide(x, y) + a = r0 + r1 = CPyInt32_Remainder(a, y) + return r1 + +[case testI32BoxAndUnbox] +from typing import Any +from mypy_extensions import i32 + +def f(x: Any) -> Any: + y: i32 = x + return y +[out] +def f(x): + x :: object + r0, y :: int32 + r1 :: object +L0: + r0 = unbox(int32, x) + y = r0 + r1 = box(int32, y) + return r1 + +[case testI32MixedCompare1_64bit] +from mypy_extensions import i32 +def f(x: int, y: i32) -> bool: + return x == y +[out] +def f(x, y): + x :: int + y :: int32 + r0 :: native_int + r1, r2, r3 :: bit + r4 :: native_int + r5, r6 
:: int32 + r7 :: bit +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = x < 4294967296 :: signed + if r2 goto L2 else goto L4 :: bool +L2: + r3 = x >= -4294967296 :: signed + if r3 goto L3 else goto L4 :: bool +L3: + r4 = x >> 1 + r5 = truncate r4: native_int to int32 + r6 = r5 + goto L5 +L4: + CPyInt32_Overflow() + unreachable +L5: + r7 = r6 == y + return r7 + +[case testI32MixedCompare2_64bit] +from mypy_extensions import i32 +def f(x: i32, y: int) -> bool: + return x == y +[out] +def f(x, y): + x :: int32 + y :: int + r0 :: native_int + r1, r2, r3 :: bit + r4 :: native_int + r5, r6 :: int32 + r7 :: bit +L0: + r0 = y & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = y < 4294967296 :: signed + if r2 goto L2 else goto L4 :: bool +L2: + r3 = y >= -4294967296 :: signed + if r3 goto L3 else goto L4 :: bool +L3: + r4 = y >> 1 + r5 = truncate r4: native_int to int32 + r6 = r5 + goto L5 +L4: + CPyInt32_Overflow() + unreachable +L5: + r7 = x == r6 + return r7 + +[case testI32MixedCompare_32bit] +from mypy_extensions import i32 +def f(x: int, y: i32) -> bool: + return x == y +[out] +def f(x, y): + x :: int + y :: int32 + r0 :: native_int + r1 :: bit + r2, r3 :: int32 + r4 :: ptr + r5 :: c_ptr + r6 :: int32 + r7 :: bit +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt32(r5) + r3 = r6 + keep_alive x +L3: + r7 = r3 == y + return r7 + +[case testI32ConvertToInt_64bit] +from mypy_extensions import i32 + +def i32_to_int(a: i32) -> int: + return a +[out] +def i32_to_int(a): + a :: int32 + r0 :: native_int + r1 :: int +L0: + r0 = extend signed a: int32 to native_int + r1 = r0 << 1 + return r1 + +[case testI32ConvertToInt_32bit] +from mypy_extensions import i32 + +def i32_to_int(a: i32) -> int: + return a +[out] +def i32_to_int(a): + a :: int32 + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = a <= 1073741823 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = a >= -1073741824 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromSsize_t(a) + r3 = r2 + goto L4 +L3: + r4 = a << 1 + r3 = r4 +L4: + return r3 + +[case testI32OperatorAssignmentMixed_64bit] +from mypy_extensions import i32 + +def f(a: i32) -> None: + x = 0 + x += a +[out] +def f(a): + a :: int32 + x :: int + r0 :: native_int + r1, r2, r3 :: bit + r4 :: native_int + r5, r6, r7 :: int32 + r8 :: native_int + r9 :: int +L0: + x = 0 + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = x < 4294967296 :: signed + if r2 goto L2 else goto L4 :: bool +L2: + r3 = x >= -4294967296 :: signed + if r3 goto L3 else goto L4 :: bool +L3: + r4 = x >> 1 + r5 = truncate r4: native_int to int32 + r6 = r5 + goto L5 +L4: + CPyInt32_Overflow() + unreachable +L5: + r7 = r6 + a + r8 = extend signed r7: int32 to native_int + r9 = r8 << 1 + x = r9 + return 1 + +[case testI32InitializeFromLiteral] +from mypy_extensions import i32, i64 + +def f() -> None: + x: i32 = 0 + y: i32 = -127 + z: i32 = 5 + 7 +[out] +def f(): + x, y, z :: int32 +L0: + x = 0 + y = -127 + z = 12 + return 1 + +[case testI32ExplicitConversionFromNativeInt] +from mypy_extensions import i64, i32 + +def from_i32(x: i32) -> i32: + return i32(x) + +def from_i64(x: i64) -> i32: + return i32(x) +[out] +def from_i32(x): + x :: int32 +L0: + return x +def from_i64(x): + x :: int64 + r0 :: int32 +L0: + r0 = truncate x: int64 to int32 + return r0 + +[case testI32ExplicitConversionFromInt_64bit] +from mypy_extensions 
import i32 + +def f(x: int) -> i32: + return i32(x) +[out] +def f(x): + x :: int + r0 :: native_int + r1, r2, r3 :: bit + r4 :: native_int + r5, r6 :: int32 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = x < 4294967296 :: signed + if r2 goto L2 else goto L4 :: bool +L2: + r3 = x >= -4294967296 :: signed + if r3 goto L3 else goto L4 :: bool +L3: + r4 = x >> 1 + r5 = truncate r4: native_int to int32 + r6 = r5 + goto L5 +L4: + CPyInt32_Overflow() + unreachable +L5: + return r6 + +[case testI32ExplicitConversionFromLiteral] +from mypy_extensions import i32 + +def f() -> None: + x = i32(0) + y = i32(11) + z = i32(-3) +[out] +def f(): + x, y, z :: int32 +L0: + x = 0 + y = 11 + z = -3 + return 1 + +[case testI32ExplicitConversionFromVariousTypes] +from mypy_extensions import i32 + +def bool_to_i32(b: bool) -> i32: + return i32(b) + +def str_to_i32(s: str) -> i32: + return i32(s) + +class C: + def __int__(self) -> i32: + return 5 + +def instance_to_i32(c: C) -> i32: + return i32(c) + +def float_to_i32(x: float) -> i32: + return i32(x) +[out] +def bool_to_i32(b): + b :: bool + r0 :: int32 +L0: + r0 = extend b: builtins.bool to int32 + return r0 +def str_to_i32(s): + s :: str + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int32, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def instance_to_i32(c): + c :: __main__.C + r0 :: int32 +L0: + r0 = c.__int__() + return r0 +def float_to_i32(x): + x :: float + r0 :: object + r1 :: int32 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int32, r0) + return r1 diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test new file mode 100644 index 000000000000..f616893d8fe5 --- /dev/null +++ b/mypyc/test-data/irbuild-i64.test @@ -0,0 +1,1972 @@ +[case testI64Basics] +from mypy_extensions import i64 + +def f() -> i64: + x: i64 = 5 + y = x + return y +[out] +def f(): + x, y :: int64 +L0: + x = 5 + y = x + return y + +[case testI64Compare] +from mypy_extensions import i64 + +def min(x: i64, y: i64) -> i64: + if x < y: + return x + else: + return y + +def all_comparisons(x: i64) -> int: + if x == 2: + y = 10 + elif 3 != x: + y = 11 + elif x > 4: + y = 12 + elif 6 >= x: + y = 13 + elif x < 5: + y = 14 + elif 6 <= x: + y = 15 + else: + y = 16 + return y +[out] +def min(x, y): + x, y :: int64 + r0 :: bit +L0: + r0 = x < y :: signed + if r0 goto L1 else goto L2 :: bool +L1: + return x +L2: + return y +L3: + unreachable +def all_comparisons(x): + x :: int64 + r0 :: bit + y :: int + r1, r2, r3, r4, r5 :: bit +L0: + r0 = x == 2 + if r0 goto L1 else goto L2 :: bool +L1: + y = 20 + goto L18 +L2: + r1 = 3 != x + if r1 goto L3 else goto L4 :: bool +L3: + y = 22 + goto L17 +L4: + r2 = x > 4 :: signed + if r2 goto L5 else goto L6 :: bool +L5: + y = 24 + goto L16 +L6: + r3 = 6 >= x :: signed + if r3 goto L7 else goto L8 :: bool +L7: + y = 26 + goto L15 +L8: + r4 = x < 5 :: signed + if r4 goto L9 else goto L10 :: bool +L9: + y = 28 + goto L14 +L10: + r5 = 6 <= x :: signed + if r5 goto L11 else goto L12 :: bool +L11: + y = 30 + goto L13 +L12: + y = 32 +L13: +L14: +L15: +L16: +L17: +L18: + return y + +[case testI64Arithmetic] +from mypy_extensions import i64 + +def f(x: i64, y: i64) -> i64: + z = x + y + return y - z +[out] +def f(x, y): + x, y, r0, z, r1 :: int64 +L0: + r0 = x + y + z = r0 + r1 = y - z + return r1 + +[case testI64Negation] +from mypy_extensions import i64 + +def f() -> i64: + i: i64 = -3 + return -i +[out] +def f(): + i, r0 :: int64 +L0: + i = -3 + r0 = 0 - i 
+ return r0 + +[case testI64MoreUnaryOps] +from mypy_extensions import i64 + +def unary(x: i64) -> i64: + y = ~x + x = +y + return x +[out] +def unary(x): + x, r0, y :: int64 +L0: + r0 = x ^ -1 + y = r0 + x = y + return x + +[case testI64BoxingAndUnboxing] +from typing import Any +from mypy_extensions import i64 + +def f(a: Any) -> None: + b: i64 = a + a = b +[out] +def f(a): + a :: object + r0, b :: int64 + r1 :: object +L0: + r0 = unbox(int64, a) + b = r0 + r1 = box(int64, b) + a = r1 + return 1 + +[case testI64ListGetSetItem] +from typing import List +from mypy_extensions import i64 + +def get(a: List[i64], i: i64) -> i64: + return a[i] +def set(a: List[i64], i: i64, x: i64) -> None: + a[i] = x +[out] +def get(a, i): + a :: list + i :: int64 + r0 :: object + r1 :: int64 +L0: + r0 = CPyList_GetItemInt64(a, i) + r1 = unbox(int64, r0) + return r1 +def set(a, i, x): + a :: list + i, x :: int64 + r0 :: object + r1 :: bit +L0: + r0 = box(int64, x) + r1 = CPyList_SetItemInt64(a, i, r0) + return 1 + +[case testI64MixedArithmetic] +from mypy_extensions import i64 + +def f() -> i64: + a: i64 = 1 + b = a + 2 + return 3 - b +[out] +def f(): + a, r0, b, r1 :: int64 +L0: + a = 1 + r0 = a + 2 + b = r0 + r1 = 3 - b + return r1 + +[case testI64MixedComparison] +from mypy_extensions import i64 + +def f(a: i64) -> i64: + if a < 3: + return 1 + elif 3 < a: + return 2 + return 3 +[out] +def f(a): + a :: int64 + r0, r1 :: bit +L0: + r0 = a < 3 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + r1 = 3 < a :: signed + if r1 goto L3 else goto L4 :: bool +L3: + return 2 +L4: +L5: + return 3 + +[case testI64InplaceOperations] +from mypy_extensions import i64 + +def add(a: i64) -> i64: + b = a + b += 1 + a += b + return a +def others(a: i64, b: i64) -> i64: + a -= b + a *= b + a &= b + a |= b + a ^= b + a <<= b + a >>= b + return a +[out] +def add(a): + a, b, r0, r1 :: int64 +L0: + b = a + r0 = b + 1 + b = r0 + r1 = a + b + a = r1 + return a +def others(a, b): + a, b, r0, r1, r2, r3, r4, r5, r6 :: int64 +L0: + r0 = a - b + a = r0 + r1 = a * b + a = r1 + r2 = a & b + a = r2 + r3 = a | b + a = r3 + r4 = a ^ b + a = r4 + r5 = a << b + a = r5 + r6 = a >> b + a = r6 + return a + +[case testI64BitwiseOps] +from mypy_extensions import i64 + +def forward(a: i64, b: i64) -> i64: + b = a & 1 + a = b | 2 + b = a ^ 3 + a = b << 4 + b = a >> 5 + return b + +def reverse(a: i64, b: i64) -> i64: + b = 1 & a + a = 2 | b + b = 3 ^ a + a = 4 << b + b = 5 >> a + return b + +def unary(a: i64) -> i64: + return ~a +[out] +def forward(a, b): + a, b, r0, r1, r2, r3, r4 :: int64 +L0: + r0 = a & 1 + b = r0 + r1 = b | 2 + a = r1 + r2 = a ^ 3 + b = r2 + r3 = b << 4 + a = r3 + r4 = a >> 5 + b = r4 + return b +def reverse(a, b): + a, b, r0, r1, r2, r3, r4 :: int64 +L0: + r0 = 1 & a + b = r0 + r1 = 2 | b + a = r1 + r2 = 3 ^ a + b = r2 + r3 = 4 << b + a = r3 + r4 = 5 >> a + b = r4 + return b +def unary(a): + a, r0 :: int64 +L0: + r0 = a ^ -1 + return r0 + +[case testI64Division] +from mypy_extensions import i64 + +def constant_divisor(x: i64) -> i64: + return x // 7 +def variable_divisor(x: i64, y: i64) -> i64: + return x // y +def constant_lhs(x: i64) -> i64: + return 27 // x +def divide_by_neg_one(x: i64) -> i64: + return x // -1 +def divide_by_zero(x: i64) -> i64: + return x // 0 +[out] +def constant_divisor(x): + x, r0, r1 :: int64 + r2, r3, r4 :: bit + r5 :: int64 + r6 :: bit + r7 :: int64 +L0: + r0 = x / 7 + r1 = r0 + r2 = x < 0 :: signed + r3 = 7 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + 
r5 = r1 * 7 + r6 = r5 == x + if r6 goto L3 else goto L2 :: bool +L2: + r7 = r1 - 1 + r1 = r7 +L3: + return r1 +def variable_divisor(x, y): + x, y, r0 :: int64 +L0: + r0 = CPyInt64_Divide(x, y) + return r0 +def constant_lhs(x): + x, r0 :: int64 +L0: + r0 = CPyInt64_Divide(27, x) + return r0 +def divide_by_neg_one(x): + x, r0 :: int64 +L0: + r0 = CPyInt64_Divide(x, -1) + return r0 +def divide_by_zero(x): + x, r0 :: int64 +L0: + r0 = CPyInt64_Divide(x, 0) + return r0 + +[case testI64Mod] +from mypy_extensions import i64 + +def constant_divisor(x: i64) -> i64: + return x % 7 +def variable_divisor(x: i64, y: i64) -> i64: + return x % y +def constant_lhs(x: i64) -> i64: + return 27 % x +def mod_by_zero(x: i64) -> i64: + return x % 0 +[out] +def constant_divisor(x): + x, r0, r1 :: int64 + r2, r3, r4, r5 :: bit + r6 :: int64 +L0: + r0 = x % 7 + r1 = r0 + r2 = x < 0 :: signed + r3 = 7 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + r5 = r1 == 0 + if r5 goto L3 else goto L2 :: bool +L2: + r6 = r1 + 7 + r1 = r6 +L3: + return r1 +def variable_divisor(x, y): + x, y, r0 :: int64 +L0: + r0 = CPyInt64_Remainder(x, y) + return r0 +def constant_lhs(x): + x, r0 :: int64 +L0: + r0 = CPyInt64_Remainder(27, x) + return r0 +def mod_by_zero(x): + x, r0 :: int64 +L0: + r0 = CPyInt64_Remainder(x, 0) + return r0 + +[case testI64InPlaceDiv] +from mypy_extensions import i64 + +def by_constant(x: i64) -> i64: + x //= 7 + return x +def by_variable(x: i64, y: i64) -> i64: + x //= y + return x +[out] +def by_constant(x): + x, r0, r1 :: int64 + r2, r3, r4 :: bit + r5 :: int64 + r6 :: bit + r7 :: int64 +L0: + r0 = x / 7 + r1 = r0 + r2 = x < 0 :: signed + r3 = 7 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + r5 = r1 * 7 + r6 = r5 == x + if r6 goto L3 else goto L2 :: bool +L2: + r7 = r1 - 1 + r1 = r7 +L3: + x = r1 + return x +def by_variable(x, y): + x, y, r0 :: int64 +L0: + r0 = CPyInt64_Divide(x, y) + x = r0 + return x + +[case testI64InPlaceMod] +from mypy_extensions import i64 + +def by_constant(x: i64) -> i64: + x %= 7 + return x +def by_variable(x: i64, y: i64) -> i64: + x %= y + return x +[out] +def by_constant(x): + x, r0, r1 :: int64 + r2, r3, r4, r5 :: bit + r6 :: int64 +L0: + r0 = x % 7 + r1 = r0 + r2 = x < 0 :: signed + r3 = 7 < 0 :: signed + r4 = r2 == r3 + if r4 goto L3 else goto L1 :: bool +L1: + r5 = r1 == 0 + if r5 goto L3 else goto L2 :: bool +L2: + r6 = r1 + 7 + r1 = r6 +L3: + x = r1 + return x +def by_variable(x, y): + x, y, r0 :: int64 +L0: + r0 = CPyInt64_Remainder(x, y) + x = r0 + return x + +[case testI64ForRange] +from mypy_extensions import i64 + +def g(a: i64) -> None: pass + +def f(x: i64) -> None: + n: i64 # TODO: Infer the type + for n in range(x): + g(n) +[out] +def g(a): + a :: int64 +L0: + return 1 +def f(x): + x, r0, n :: int64 + r1 :: bit + r2 :: None + r3 :: int64 +L0: + r0 = 0 + n = r0 +L1: + r1 = r0 < x :: signed + if r1 goto L2 else goto L4 :: bool +L2: + r2 = g(n) +L3: + r3 = r0 + 1 + r0 = r3 + n = r3 + goto L1 +L4: + return 1 + +[case testI64ConvertFromInt_64bit] +from mypy_extensions import i64 + +def int_to_i64(a: int) -> i64: + return a +[out] +def int_to_i64(a): + a :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = a & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = a >> 1 + r3 = r2 + goto L3 +L2: + r4 = a ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive a +L3: + return r3 + +[case testI64ConvertToInt_64bit] +from mypy_extensions import i64 
+ +def i64_to_int(a: i64) -> int: + return a +[out] +def i64_to_int(a): + a :: int64 + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = a <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = a >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(a) + r3 = r2 + goto L4 +L3: + r4 = a << 1 + r3 = r4 +L4: + return r3 + +[case testI64ConvertToInt_32bit] +from mypy_extensions import i64 + +def i64_to_int(a: i64) -> int: + return a +[out] +def i64_to_int(a): + a :: int64 + r0, r1 :: bit + r2, r3 :: int + r4 :: native_int + r5 :: int +L0: + r0 = a <= 1073741823 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = a >= -1073741824 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(a) + r3 = r2 + goto L4 +L3: + r4 = truncate a: int64 to native_int + r5 = r4 << 1 + r3 = r5 +L4: + return r3 + +[case testI64Tuple] +from typing import Tuple +from mypy_extensions import i64 + +def f(x: i64, y: i64) -> Tuple[i64, i64]: + return x, y + +def g() -> Tuple[i64, i64]: + # TODO: Avoid boxing and unboxing + return 1, 2 + +def h() -> i64: + x, y = g() + t = g() + return x + y + t[0] +[out] +def f(x, y): + x, y :: int64 + r0 :: tuple[int64, int64] +L0: + r0 = (x, y) + return r0 +def g(): + r0 :: tuple[int, int] + r1 :: object + r2 :: tuple[int64, int64] +L0: + r0 = (2, 4) + r1 = box(tuple[int, int], r0) + r2 = unbox(tuple[int64, int64], r1) + return r2 +def h(): + r0 :: tuple[int64, int64] + r1, x, r2, y :: int64 + r3, t :: tuple[int64, int64] + r4, r5, r6 :: int64 +L0: + r0 = g() + r1 = r0[0] + x = r1 + r2 = r0[1] + y = r2 + r3 = g() + t = r3 + r4 = x + y + r5 = t[0] + r6 = r4 + r5 + return r6 + +[case testI64MixWithTagged1_64bit] +from mypy_extensions import i64 +def f(x: i64, y: int) -> i64: + return x + y +[out] +def f(x, y): + x :: int64 + y :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6, r7 :: int64 +L0: + r0 = y & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = y >> 1 + r3 = r2 + goto L3 +L2: + r4 = y ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive y +L3: + r7 = x + r3 + return r7 + +[case testI64MixWithTagged2_64bit] +from mypy_extensions import i64 +def f(x: int, y: i64) -> i64: + return x + y +[out] +def f(x, y): + x :: int + y :: int64 + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6, r7 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + r7 = r3 + y + return r7 + +[case testI64MixWithTaggedInPlace1_64bit] +from mypy_extensions import i64 +def f(y: i64) -> int: + x = 0 + x += y + return x +[out] +def f(y): + y :: int64 + x :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6, r7 :: int64 + r8, r9 :: bit + r10, r11, r12 :: int +L0: + x = 0 + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + r7 = r3 + y + r8 = r7 <= 4611686018427387903 :: signed + if r8 goto L4 else goto L5 :: bool +L4: + r9 = r7 >= -4611686018427387904 :: signed + if r9 goto L6 else goto L5 :: bool +L5: + r10 = CPyTagged_FromInt64(r7) + r11 = r10 + goto L7 +L6: + r12 = r7 << 1 + r11 = r12 +L7: + x = r11 + return x + +[case testI64MixWithTaggedInPlace2_64bit] +from mypy_extensions import i64 +def f(y: int) 
-> i64: + x: i64 = 0 + x += y + return x +[out] +def f(y): + y :: int + x :: int64 + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6, r7 :: int64 +L0: + x = 0 + r0 = y & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = y >> 1 + r3 = r2 + goto L3 +L2: + r4 = y ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive y +L3: + r7 = x + r3 + x = r7 + return x + +[case testI64MixedCompare1_64bit] +from mypy_extensions import i64 +def f(x: int, y: i64) -> bool: + return x == y +[out] +def f(x, y): + x :: int + y :: int64 + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 + r7 :: bit +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + r7 = r3 == y + return r7 + +[case testI64MixedCompare2_64bit] +from mypy_extensions import i64 +def f(x: i64, y: int) -> bool: + return x == y +[out] +def f(x, y): + x :: int64 + y :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 + r7 :: bit +L0: + r0 = y & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = y >> 1 + r3 = r2 + goto L3 +L2: + r4 = y ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive y +L3: + r7 = x == r3 + return r7 + +[case testI64MixedCompare_32bit] +from mypy_extensions import i64 +def f(x: int, y: i64) -> bool: + return x == y +[out] +def f(x, y): + x :: int + y :: int64 + r0 :: native_int + r1 :: bit + r2, r3, r4 :: int64 + r5 :: ptr + r6 :: c_ptr + r7 :: int64 + r8 :: bit +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = extend signed x: builtins.int to int64 + r3 = r2 >> 1 + r4 = r3 + goto L3 +L2: + r5 = x ^ 1 + r6 = r5 + r7 = CPyLong_AsInt64(r6) + r4 = r7 + keep_alive x +L3: + r8 = r4 == y + return r8 + +[case testI64AsBool] +from mypy_extensions import i64 +def f(x: i64) -> i64: + if x: + return 5 + elif not x: + return 6 + return 3 +[out] +def f(x): + x :: int64 + r0, r1 :: bit +L0: + r0 = x != 0 + if r0 goto L1 else goto L2 :: bool +L1: + return 5 +L2: + r1 = x != 0 + if r1 goto L4 else goto L3 :: bool +L3: + return 6 +L4: +L5: + return 3 + +[case testI64AssignMixed_64bit] +from mypy_extensions import i64 +def f(x: i64, y: int) -> i64: + x = y + return x +def g(x: i64, y: int) -> int: + y = x + return y +[out] +def f(x, y): + x :: int64 + y :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = y & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = y >> 1 + r3 = r2 + goto L3 +L2: + r4 = y ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive y +L3: + x = r3 + return x +def g(x, y): + x :: int64 + y :: int + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = x <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = x << 1 + r3 = r4 +L4: + y = r3 + return y + +[case testBorrowOverI64Arithmetic] +from mypy_extensions import i64 + +def add_simple(c: C) -> i64: + return c.x + c.y + +def inplace_add_simple(c: C) -> None: + c.x += c.y + +def add_borrow(d: D) -> i64: + return d.c.x + d.c.y + +class D: + c: C + +class C: + x: i64 + y: i64 +[out] +def add_simple(c): + c :: __main__.C + r0, r1, r2 :: int64 +L0: + r0 = c.x + r1 = c.y + r2 = r0 + r1 + return r2 +def 
inplace_add_simple(c): + c :: __main__.C + r0, r1, r2 :: int64 + r3 :: bool +L0: + r0 = c.x + r1 = c.y + r2 = r0 + r1 + c.x = r2; r3 = is_error + return 1 +def add_borrow(d): + d :: __main__.D + r0 :: __main__.C + r1 :: int64 + r2 :: __main__.C + r3, r4 :: int64 +L0: + r0 = borrow d.c + r1 = r0.x + r2 = borrow d.c + r3 = r2.y + r4 = r1 + r3 + keep_alive d, d + return r4 + +[case testBorrowOverI64Bitwise] +from mypy_extensions import i64 + +def bitwise_simple(c: C) -> i64: + return c.x | c.y + +def inplace_bitwide_simple(c: C) -> None: + c.x &= c.y + +def bitwise_borrow(d: D) -> i64: + return d.c.x ^ d.c.y + +class D: + c: C + +class C: + x: i64 + y: i64 +[out] +def bitwise_simple(c): + c :: __main__.C + r0, r1, r2 :: int64 +L0: + r0 = c.x + r1 = c.y + r2 = r0 | r1 + return r2 +def inplace_bitwide_simple(c): + c :: __main__.C + r0, r1, r2 :: int64 + r3 :: bool +L0: + r0 = c.x + r1 = c.y + r2 = r0 & r1 + c.x = r2; r3 = is_error + return 1 +def bitwise_borrow(d): + d :: __main__.D + r0 :: __main__.C + r1 :: int64 + r2 :: __main__.C + r3, r4 :: int64 +L0: + r0 = borrow d.c + r1 = r0.x + r2 = borrow d.c + r3 = r2.y + r4 = r1 ^ r3 + keep_alive d, d + return r4 + +[case testBorrowOverI64ListGetItem1] +from mypy_extensions import i64 + +def f(n: i64) -> str: + a = [C()] + return a[n].s + +class C: + s: str +[out] +def f(n): + n :: int64 + r0 :: __main__.C + r1 :: list + r2, r3 :: ptr + a :: list + r4 :: object + r5 :: __main__.C + r6 :: str +L0: + r0 = C() + r1 = PyList_New(1) + r2 = get_element_ptr r1 ob_item :: PyListObject + r3 = load_mem r2 :: ptr* + set_mem r3, r0 :: builtins.object* + keep_alive r1 + a = r1 + r4 = CPyList_GetItemInt64Borrow(a, n) + r5 = borrow cast(__main__.C, r4) + r6 = r5.s + keep_alive a, n, r4 + return r6 + +[case testBorrowOverI64ListGetItem2] +from typing import List +from mypy_extensions import i64 + +def f(a: List[i64], n: i64) -> bool: + if a[n] == 0: + return True + return False +[out] +def f(a, n): + a :: list + n :: int64 + r0 :: object + r1 :: int64 + r2 :: bit +L0: + r0 = CPyList_GetItemInt64Borrow(a, n) + r1 = unbox(int64, r0) + r2 = r1 == 0 + keep_alive a, n + if r2 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + return 0 + +[case testCoerceShortIntToI64] +from mypy_extensions import i64 +from typing import List + +def f(a: List[i64], y: i64) -> bool: + if len(a) < y: + return True + return False + +def g(a: List[i64], y: i64) -> bool: + if y < len(a): + return True + return False +[out] +def f(a, y): + a :: list + y :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: int64 + r4 :: bit +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = r3 < y :: signed + if r4 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + return 0 +def g(a, y): + a :: list + y :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: int64 + r4 :: bit +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = y < r3 :: signed + if r4 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + return 0 + +[case testMultiplyListByI64_64bit] +from mypy_extensions import i64 +from typing import List + +def f(n: i64) -> List[i64]: + return [n] * n +[out] +def f(n): + n :: int64 + r0 :: list + r1 :: object + r2, r3 :: ptr + r4, r5 :: bit + r6, r7, r8 :: int + r9 :: list +L0: + r0 = PyList_New(1) + r1 = box(int64, n) + r2 = get_element_ptr r0 ob_item :: PyListObject + r3 = load_mem r2 :: ptr* + set_mem r3, r1 
:: builtins.object* + keep_alive r0 + r4 = n <= 4611686018427387903 :: signed + if r4 goto L1 else goto L2 :: bool +L1: + r5 = n >= -4611686018427387904 :: signed + if r5 goto L3 else goto L2 :: bool +L2: + r6 = CPyTagged_FromInt64(n) + r7 = r6 + goto L4 +L3: + r8 = n << 1 + r7 = r8 +L4: + r9 = CPySequence_Multiply(r0, r7) + return r9 + +[case testShortIntAndI64Op] +from mypy_extensions import i64 +from typing import List + +def add_i64(a: List[i64], n: i64) -> i64: + return len(a) + n +def add_i64_2(a: List[i64], n: i64) -> i64: + return n + len(a) +def eq_i64(a: List[i64], n: i64) -> bool: + if len(a) == n: + return True + return False +def lt_i64(a: List[i64], n: i64) -> bool: + if n < len(a): + return True + return False +[out] +def add_i64(a, n): + a :: list + n :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3, r4 :: int64 +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = r3 + n + return r4 +def add_i64_2(a, n): + a :: list + n :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3, r4 :: int64 +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = n + r3 + return r4 +def eq_i64(a, n): + a :: list + n :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: int64 + r4 :: bit +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = r3 == n + if r4 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + return 0 +def lt_i64(a, n): + a :: list + n :: int64 + r0 :: ptr + r1 :: native_int + r2 :: short_int + r3 :: int64 + r4 :: bit +L0: + r0 = get_element_ptr a ob_size :: PyVarObject + r1 = load_mem r0 :: native_int* + keep_alive a + r2 = r1 << 1 + r3 = r2 >> 1 + r4 = n < r3 :: signed + if r4 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + return 0 + +[case testOptionalI64_64bit] +from typing import Optional +from mypy_extensions import i64 + +def f(x: Optional[i64]) -> i64: + if x is None: + return 1 + return x +[out] +def f(x): + x :: union[int64, None] + r0 :: object + r1 :: bit + r2 :: int64 +L0: + r0 = load_address _Py_NoneStruct + r1 = x == r0 + if r1 goto L1 else goto L2 :: bool +L1: + return 1 +L2: + r2 = unbox(int64, x) + return r2 + +[case testI64DefaultValueSingle] +from mypy_extensions import i64 + +def f(x: i64, y: i64 = 0) -> i64: + return x + y + +def g() -> i64: + return f(7) + f(8, 9) +[out] +def f(x, y, __bitmap): + x, y :: int64 + __bitmap, r0 :: uint32 + r1 :: bit + r2 :: int64 +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + y = 0 +L2: + r2 = x + y + return r2 +def g(): + r0, r1, r2 :: int64 +L0: + r0 = f(7, 0, 0) + r1 = f(8, 9, 1) + r2 = r0 + r1 + return r2 + +[case testI64DefaultValueWithMultipleArgs] +from mypy_extensions import i64 + +def f(a: i64, b: i64 = 1, c: int = 2, d: i64 = 3) -> i64: + return 0 + +def g() -> i64: + return f(7) + f(8, 9) + f(1, 2, 3) + f(4, 5, 6, 7) +[out] +def f(a, b, c, d, __bitmap): + a, b :: int64 + c :: int + d :: int64 + __bitmap, r0 :: uint32 + r1 :: bit + r2 :: uint32 + r3 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + b = 1 +L2: + if is_error(c) goto L3 else goto L4 +L3: + c = 4 +L4: + r2 = __bitmap & 2 + r3 = r2 == 0 + if r3 goto L5 else goto L6 :: bool +L5: + d = 3 +L6: + return 0 +def g(): + r0 :: int + r1 :: int64 + r2 :: int + r3, r4, r5, r6, r7, r8 :: int64 +L0: + r0 = 
:: int + r1 = f(7, 0, r0, 0, 0) + r2 = :: int + r3 = f(8, 9, r2, 0, 1) + r4 = r1 + r3 + r5 = f(1, 2, 6, 0, 1) + r6 = r4 + r5 + r7 = f(4, 5, 12, 7, 3) + r8 = r6 + r7 + return r8 + +[case testI64MethodDefaultValue] +from mypy_extensions import i64 + +class C: + def m(self, x: i64 = 5) -> None: + pass + +def f(c: C) -> None: + c.m() + c.m(6) +[out] +def C.m(self, x, __bitmap): + self :: __main__.C + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 5 +L2: + return 1 +def f(c): + c :: __main__.C + r0, r1 :: None +L0: + r0 = c.m(0, 0) + r1 = c.m(6, 1) + return 1 + +[case testI64ExplicitConversionFromNativeInt] +from mypy_extensions import i64, i32 + +def from_i32(x: i32) -> i64: + return i64(x) + +def from_i64(x: i64) -> i64: + return i64(x) +[out] +def from_i32(x): + x :: int32 + r0 :: int64 +L0: + r0 = extend signed x: int32 to int64 + return r0 +def from_i64(x): + x :: int64 +L0: + return x + +[case testI64ExplicitConversionFromInt_64bit] +from mypy_extensions import i64 + +def f(x: int) -> i64: + return i64(x) +[out] +def f(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + return r3 + +[case testI64ExplicitConversionToInt_64bit] +from mypy_extensions import i64 + +def f(x: i64) -> int: + return int(x) +[out] +def f(x): + x :: int64 + r0, r1 :: bit + r2, r3, r4 :: int +L0: + r0 = x <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = x << 1 + r3 = r4 +L4: + return r3 + +[case testI64ExplicitConversionFromLiteral] +from mypy_extensions import i64 + +def f() -> None: + x = i64(0) + y = i64(11) + z = i64(-3) +[out] +def f(): + x, y, z :: int64 +L0: + x = 0 + y = 11 + z = -3 + return 1 + +[case testI64ForLoopOverRange] +from mypy_extensions import i64 + +def f() -> None: + for x in range(i64(4)): + y = x +[out] +def f(): + r0, x :: int64 + r1 :: bit + y, r2 :: int64 +L0: + r0 = 0 + x = r0 +L1: + r1 = r0 < 4 :: signed + if r1 goto L2 else goto L4 :: bool +L2: + y = x +L3: + r2 = r0 + 1 + r0 = r2 + x = r2 + goto L1 +L4: + return 1 + +[case testI64ForLoopOverRange2] +from mypy_extensions import i64 + +def f() -> None: + for x in range(0, i64(4)): + y = x +[out] +def f(): + r0, x :: int64 + r1 :: bit + y, r2 :: int64 +L0: + r0 = 0 + x = r0 +L1: + r1 = r0 < 4 :: signed + if r1 goto L2 else goto L4 :: bool +L2: + y = x +L3: + r2 = r0 + 1 + r0 = r2 + x = r2 + goto L1 +L4: + return 1 + +[case testI64MethodDefaultValueOverride] +from mypy_extensions import i64 + +class C: + def f(self, x: i64 = 11) -> None: pass +class D(C): + def f(self, x: i64 = 12) -> None: pass +[out] +def C.f(self, x, __bitmap): + self :: __main__.C + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 11 +L2: + return 1 +def D.f(self, x, __bitmap): + self :: __main__.D + x :: int64 + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 12 +L2: + return 1 + +[case testI64FinalConstants] +from typing_extensions import Final +from mypy_extensions import i64 + +A: Final = -1 +B: Final = 
-(1 + 3*2) +C: Final = 0 +D: Final = A - B +E: Final[i64] = 1 + 3 + +def f1() -> i64: + return A + +def f2() -> i64: + return A + B + +def f3() -> i64: + return C + +def f4() -> i64: + return D + +def f5() -> i64: + return E +[out] +def f1(): +L0: + return -1 +def f2(): +L0: + return -8 +def f3(): +L0: + return 0 +def f4(): +L0: + return 6 +def f5(): +L0: + return 4 + +[case testI64OperationsWithBools] +from mypy_extensions import i64 + +# TODO: Other mixed operations + +def add_bool_to_int(n: i64, b: bool) -> i64: + return n + b + +def compare_bool_to_i64(n: i64, b: bool) -> bool: + if n == b: + return b != n + return True +[out] +def add_bool_to_int(n, b): + n :: int64 + b :: bool + r0, r1 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + r1 = n + r0 + return r1 +def compare_bool_to_i64(n, b): + n :: int64 + b :: bool + r0 :: int64 + r1 :: bit + r2 :: int64 + r3 :: bit +L0: + r0 = extend b: builtins.bool to int64 + r1 = n == r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = extend b: builtins.bool to int64 + r3 = r2 != n + return r3 +L2: + return 1 + +[case testI64Cast_64bit] +from typing import cast +from mypy_extensions import i64 + +def cast_object(o: object) -> i64: + return cast(i64, o) + +def cast_int(x: int) -> i64: + return cast(i64, x) +[out] +def cast_object(o): + o :: object + r0 :: int64 +L0: + r0 = unbox(int64, o) + return r0 +def cast_int(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3 :: int64 + r4 :: ptr + r5 :: c_ptr + r6 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = x >> 1 + r3 = r2 + goto L3 +L2: + r4 = x ^ 1 + r5 = r4 + r6 = CPyLong_AsInt64(r5) + r3 = r6 + keep_alive x +L3: + return r3 + +[case testI64Cast_32bit] +from typing import cast +from mypy_extensions import i64 + +def cast_int(x: int) -> i64: + return cast(i64, x) +[out] +def cast_int(x): + x :: int + r0 :: native_int + r1 :: bit + r2, r3, r4 :: int64 + r5 :: ptr + r6 :: c_ptr + r7 :: int64 +L0: + r0 = x & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = extend signed x: builtins.int to int64 + r3 = r2 >> 1 + r4 = r3 + goto L3 +L2: + r5 = x ^ 1 + r6 = r5 + r7 = CPyLong_AsInt64(r6) + r4 = r7 + keep_alive x +L3: + return r4 + +[case testI64ExplicitConversionFromVariousTypes] +from mypy_extensions import i64 + +def bool_to_i64(b: bool) -> i64: + return i64(b) + +def str_to_i64(s: str) -> i64: + return i64(s) + +def str_to_i64_with_base(s: str) -> i64: + return i64(s, 2) + +class C: + def __int__(self) -> i64: + return 5 + +def instance_to_i64(c: C) -> i64: + return i64(c) + +def float_to_i64(x: float) -> i64: + return i64(x) +[out] +def bool_to_i64(b): + b :: bool + r0 :: int64 +L0: + r0 = extend b: builtins.bool to int64 + return r0 +def str_to_i64(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStr(s) + r1 = unbox(int64, r0) + return r1 +def str_to_i64_with_base(s): + s :: str + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromStrWithBase(s, 4) + r1 = unbox(int64, r0) + return r1 +def C.__int__(self): + self :: __main__.C +L0: + return 5 +def instance_to_i64(c): + c :: __main__.C + r0 :: int64 +L0: + r0 = c.__int__() + return r0 +def float_to_i64(x): + x :: float + r0 :: object + r1 :: int64 +L0: + r0 = CPyLong_FromFloat(x) + r1 = unbox(int64, r0) + return r1 + +[case testI64IsinstanceNarrowing] +from typing import Union +from mypy_extensions import i64 + +class C: + a: i64 + +def narrow1(x: Union[C, i64]) -> i64: + if isinstance(x, i64): + return x + return x.a + +def narrow2(x: Union[C, i64]) -> i64: + if 
isinstance(x, int): + return x + return x.a +[out] +def narrow1(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 +def narrow2(x): + x :: union[__main__.C, int64] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: int64 + r5 :: __main__.C + r6 :: int64 +L0: + r0 = load_address PyLong_Type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = unbox(int64, x) + return r4 +L2: + r5 = borrow cast(__main__.C, x) + r6 = r5.a + keep_alive x + return r6 diff --git a/mypyc/test-data/irbuild-int.test b/mypyc/test-data/irbuild-int.test index 8bf43cfa4923..fbe00aff4040 100644 --- a/mypyc/test-data/irbuild-int.test +++ b/mypyc/test-data/irbuild-int.test @@ -155,3 +155,81 @@ def divby8(x): L0: r0 = CPyTagged_Rshift(x, 6) return r0 + +[case testFinalConstantFolding] +from typing_extensions import Final + +X: Final = -1 +Y: Final = -(1 + 3*2) +Z: Final = Y + 1 + +class C: + A: Final = 1 + B: Final = -1 + +def f1() -> int: + return X + +def f2() -> int: + return X + Y + +def f3() -> int: + return Z + +def f4() -> int: + return C.A + +def f5() -> int: + return C.B +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C +L0: + __mypyc_self__.A = 2 + __mypyc_self__.B = -2 + return 1 +def f1(): +L0: + return -2 +def f2(): +L0: + return -16 +def f3(): +L0: + return -12 +def f4(): +L0: + return 2 +def f5(): +L0: + return -2 + +[case testConvertIntegralToInt] +def bool_to_int(b: bool) -> int: + return int(b) + +def int_to_int(n: int) -> int: + return int(n) +[out] +def bool_to_int(b): + b, r0 :: bool + r1 :: int +L0: + r0 = b << 1 + r1 = extend r0: builtins.bool to builtins.int + return r1 +def int_to_int(n): + n :: int +L0: + return n + +[case testIntUnaryPlus] +def unary_plus(n: int) -> int: + x = +n + return x +[out] +def unary_plus(n): + n, x :: int +L0: + x = n + return x diff --git a/mypyc/test-data/irbuild-lists.test b/mypyc/test-data/irbuild-lists.test index 47f7ada709e3..cb9687a2f942 100644 --- a/mypyc/test-data/irbuild-lists.test +++ b/mypyc/test-data/irbuild-lists.test @@ -428,3 +428,104 @@ L4: L5: res = r8 return 1 + +[case testSimplifyListUnion] +from typing import List, Union, Optional + +def narrow(a: Union[List[str], List[bytes], int]) -> int: + if isinstance(a, list): + return len(a) + return a +def loop(a: Union[List[str], List[bytes]]) -> None: + for x in a: + pass +def nested_union(a: Union[List[str], List[Optional[str]]]) -> None: + for x in a: + pass +[out] +def narrow(a): + a :: union[list, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: list + r5 :: ptr + r6 :: native_int + r7 :: short_int + r8 :: int +L0: + r0 = load_address PyList_Type + r1 = PyObject_IsInstance(a, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L2 :: bool +L1: + r4 = borrow cast(list, a) + r5 = get_element_ptr r4 ob_size :: PyVarObject + r6 = load_mem r5 :: native_int* + keep_alive r4 + r7 = r6 << 1 + keep_alive a + return r7 +L2: + r8 = unbox(int, a) + return r8 +def loop(a): + a :: list + r0 :: short_int + r1 :: ptr + r2 :: native_int + r3 :: 
short_int + r4 :: bit + r5 :: object + r6, x :: union[str, bytes] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, bytes], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 +def nested_union(a): + a :: list + r0 :: short_int + r1 :: ptr + r2 :: native_int + r3 :: short_int + r4 :: bit + r5 :: object + r6, x :: union[str, None] + r7 :: short_int +L0: + r0 = 0 +L1: + r1 = get_element_ptr a ob_size :: PyVarObject + r2 = load_mem r1 :: native_int* + keep_alive a + r3 = r2 << 1 + r4 = r0 < r3 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = CPyList_GetItemUnsafe(a, r0) + r6 = cast(union[str, None], r5) + x = r6 +L3: + r7 = r0 + 2 + r0 = r7 + goto L1 +L4: + return 1 diff --git a/mypyc/test-data/irbuild-match.test b/mypyc/test-data/irbuild-match.test new file mode 100644 index 000000000000..2afe3d862f51 --- /dev/null +++ b/mypyc/test-data/irbuild-match.test @@ -0,0 +1,1708 @@ +[case testMatchValuePattern_python3_10] +def f(): + match 123: + case 123: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchOrPattern_python3_10] +def f(): + match 123: + case 123 | 456: + print("matched") +[out] +def f(): + r0, r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = 246 == 246 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = 246 == 912 + if r1 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L5 +L4: +L5: + r9 = box(None, 1) + return r9 +[case testMatchOrPatternManyPatterns_python3_10] +def f(): + match 1: + case 1 | 2 | 3 | 4: + print("matched") +[out] +def f(): + r0, r1, r2, r3 :: bit + r4 :: str + r5 :: object + r6 :: str + r7 :: object + r8 :: object[1] + r9 :: object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L5 else goto L1 :: bool +L1: + r1 = 2 == 4 + if r1 goto L5 else goto L2 :: bool +L2: + r2 = 2 == 6 + if r2 goto L5 else goto L3 :: bool +L3: + r3 = 2 == 8 + if r3 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L7 +L6: +L7: + r11 = box(None, 1) + return r11 +[case testMatchClassPattern_python3_10] +def f(): + match 123: + case int(): + print("matched") +[out] +def f(): + r0, r1 :: object + r2 :: bool + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = 
_PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L3 +L2: +L3: + r10 = box(None, 1) + return r10 +[case testMatchExaustivePattern_python3_10] +def f(): + match 123: + case _: + print("matched") +[out] +def f(): + r0 :: str + r1 :: object + r2 :: str + r3 :: object + r4 :: object[1] + r5 :: object_ptr + r6, r7 :: object +L0: +L1: + r0 = 'matched' + r1 = builtins :: module + r2 = 'print' + r3 = CPyObject_GetAttr(r1, r2) + r4 = [r0] + r5 = load_address r4 + r6 = _PyObject_Vectorcall(r3, r5, 1, 0) + keep_alive r0 + goto L3 +L2: +L3: + r7 = box(None, 1) + return r7 +[case testMatchMultipleBodies_python3_10] +def f(): + match 123: + case 123: + print("matched") + case 456: + print("no match") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8 :: bit + r9 :: str + r10 :: object + r11 :: str + r12 :: object + r13 :: object[1] + r14 :: object_ptr + r15, r16 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L5 +L2: + r8 = 246 == 912 + if r8 goto L3 else goto L4 :: bool +L3: + r9 = 'no match' + r10 = builtins :: module + r11 = 'print' + r12 = CPyObject_GetAttr(r10, r11) + r13 = [r9] + r14 = load_address r13 + r15 = _PyObject_Vectorcall(r12, r14, 1, 0) + keep_alive r9 + goto L5 +L4: +L5: + r16 = box(None, 1) + return r16 +[case testMatchMultiBodyAndComplexOr_python3_10] +def f(): + match 123: + case 1: + print("here 1") + case 2 | 3: + print("here 2 | 3") + case 123: + print("here 123") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7 :: object + r8, r9 :: bit + r10 :: str + r11 :: object + r12 :: str + r13 :: object + r14 :: object[1] + r15 :: object_ptr + r16 :: object + r17 :: bit + r18 :: str + r19 :: object + r20 :: str + r21 :: object + r22 :: object[1] + r23 :: object_ptr + r24, r25 :: object +L0: + r0 = 246 == 2 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = 'here 1' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive r1 + goto L9 +L2: + r8 = 246 == 4 + if r8 goto L5 else goto L3 :: bool +L3: + r9 = 246 == 6 + if r9 goto L5 else goto L4 :: bool +L4: + goto L6 +L5: + r10 = 'here 2 | 3' + r11 = builtins :: module + r12 = 'print' + r13 = CPyObject_GetAttr(r11, r12) + r14 = [r10] + r15 = load_address r14 + r16 = _PyObject_Vectorcall(r13, r15, 1, 0) + keep_alive r10 + goto L9 +L6: + r17 = 246 == 246 + if r17 goto L7 else goto L8 :: bool +L7: + r18 = 'here 123' + r19 = builtins :: module + r20 = 'print' + r21 = CPyObject_GetAttr(r19, r20) + r22 = [r18] + r23 = load_address r22 + r24 = _PyObject_Vectorcall(r21, r23, 1, 0) + keep_alive r18 + goto L9 +L8: +L9: + r25 = box(None, 1) + return r25 +[case testMatchWithGuard_python3_10] +def f(): + match 123: + case 123 if True: + print("matched") +[out] +def f(): + r0 :: bit + r1 :: str + r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + if r0 goto L1 else goto L3 :: bool +L1: + if 1 goto L2 else goto L3 :: bool +L2: + r1 = 'matched' + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [r1] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive 
r1 + goto L4 +L3: +L4: + r8 = box(None, 1) + return r8 +[case testMatchSingleton_python3_10] +def f(): + match 123: + case True: + print("value is True") + case False: + print("value is False") + case None: + print("value is None") +[out] +def f(): + r0, r1 :: object + r2 :: bit + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10, r11 :: object + r12 :: bit + r13 :: str + r14 :: object + r15 :: str + r16 :: object + r17 :: object[1] + r18 :: object_ptr + r19, r20, r21 :: object + r22 :: bit + r23 :: str + r24 :: object + r25 :: str + r26 :: object + r27 :: object[1] + r28 :: object_ptr + r29, r30 :: object +L0: + r0 = object 123 + r1 = box(bool, 1) + r2 = r0 == r1 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = 'value is True' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L7 +L2: + r10 = object 123 + r11 = box(bool, 0) + r12 = r10 == r11 + if r12 goto L3 else goto L4 :: bool +L3: + r13 = 'value is False' + r14 = builtins :: module + r15 = 'print' + r16 = CPyObject_GetAttr(r14, r15) + r17 = [r13] + r18 = load_address r17 + r19 = _PyObject_Vectorcall(r16, r18, 1, 0) + keep_alive r13 + goto L7 +L4: + r20 = load_address _Py_NoneStruct + r21 = object 123 + r22 = r21 == r20 + if r22 goto L5 else goto L6 :: bool +L5: + r23 = 'value is None' + r24 = builtins :: module + r25 = 'print' + r26 = CPyObject_GetAttr(r24, r25) + r27 = [r23] + r28 = load_address r27 + r29 = _PyObject_Vectorcall(r26, r28, 1, 0) + keep_alive r23 + goto L7 +L6: +L7: + r30 = box(None, 1) + return r30 +[case testMatchRecursiveOrPattern_python3_10] +def f(): + match 1: + case 1 | int(): + print("matched") +[out] +def f(): + r0 :: bit + r1, r2 :: object + r3 :: bool + r4 :: str + r5 :: object + r6 :: str + r7 :: object + r8 :: object[1] + r9 :: object_ptr + r10, r11 :: object +L0: + r0 = 2 == 2 + if r0 goto L3 else goto L1 :: bool +L1: + r1 = load_address PyLong_Type + r2 = object 1 + r3 = CPy_TypeCheck(r2, r1) + if r3 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = 'matched' + r5 = builtins :: module + r6 = 'print' + r7 = CPyObject_GetAttr(r5, r6) + r8 = [r4] + r9 = load_address r8 + r10 = _PyObject_Vectorcall(r7, r9, 1, 0) + keep_alive r4 + goto L5 +L4: +L5: + r11 = box(None, 1) + return r11 +[case testMatchAsPattern_python3_10] +def f(): + match 123: + case 123 as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x, r2 :: object + r3 :: str + r4 :: object + r5 :: object[1] + r6 :: object_ptr + r7, r8 :: object +L0: + r0 = 246 == 246 + r1 = object 123 + x = r1 + if r0 goto L1 else goto L2 :: bool +L1: + r2 = builtins :: module + r3 = 'print' + r4 = CPyObject_GetAttr(r2, r3) + r5 = [x] + r6 = load_address r5 + r7 = _PyObject_Vectorcall(r4, r6, 1, 0) + keep_alive x + goto L3 +L2: +L3: + r8 = box(None, 1) + return r8 +[case testMatchAsPatternOnOrPattern_python3_10] +def f(): + match 1: + case (1 | 2) as x: + print(x) +[out] +def f(): + r0 :: bit + r1, x :: object + r2 :: bit + r3, r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = 2 == 2 + r1 = object 1 + x = r1 + if r0 goto L3 else goto L1 :: bool +L1: + r2 = 2 == 4 + r3 = object 2 + x = r3 + if r2 goto L3 else goto L2 :: bool +L2: + goto L4 +L3: + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [x] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive x + goto L5 +L4: +L5: + r10 = 
box(None, 1) + return r10 +[case testMatchAsPatternOnClassPattern_python3_10] +def f(): + match 123: + case int() as i: + print(i) +[out] +def f(): + r0, r1 :: object + r2 :: bool + i :: int + r3 :: object + r4 :: str + r5, r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = object 123 + r2 = CPy_TypeCheck(r1, r0) + if r2 goto L1 else goto L3 :: bool +L1: + i = 246 +L2: + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = box(int, i) + r7 = [r6] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r5, r8, 1, 0) + keep_alive r6 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchClassPatternWithPositionalArgs_python3_10] +class Position: + __match_args__ = ("x", "y", "z") + + x: int + y: int + z: int + +def f(x): + match x: + case Position(1, 2, 3): + print("matched") +[out] +def Position.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.Position + r0, r1, r2 :: str + r3 :: tuple[str, str, str] +L0: + r0 = 'x' + r1 = 'y' + r2 = 'z' + r3 = (r0, r1, r2) + __mypyc_self__.__match_args__ = r3 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'z' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithKeywordPatterns_python3_10] +class Position: + x: int + y: int + z: int + +def f(x): + match x: + case Position(z=1, y=2, x=3): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12, r13, r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: str + r19, r20, r21 :: object + r22 :: int32 + r23 :: bit + r24 :: bool + r25 :: str + r26 :: object + r27 :: str + r28 :: object + r29 :: object[1] + r30 :: object_ptr + r31, r32 :: object +L0: + r0 = __main__.Position :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'z' + r5 = 
CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L2 else goto L5 :: bool +L2: + r11 = 'y' + r12 = CPyObject_GetAttr(x, r11) + r13 = object 2 + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L3 else goto L5 :: bool +L3: + r18 = 'x' + r19 = CPyObject_GetAttr(x, r18) + r20 = object 3 + r21 = PyObject_RichCompare(r19, r20, 2) + r22 = PyObject_IsTrue(r21) + r23 = r22 >= 0 :: signed + r24 = truncate r22: int32 to builtins.bool + if r24 goto L4 else goto L5 :: bool +L4: + r25 = 'matched' + r26 = builtins :: module + r27 = 'print' + r28 = CPyObject_GetAttr(r26, r27) + r29 = [r25] + r30 = load_address r29 + r31 = _PyObject_Vectorcall(r28, r30, 1, 0) + keep_alive r25 + goto L6 +L5: +L6: + r32 = box(None, 1) + return r32 +[case testMatchClassPatternWithNestedPattern_python3_10] +class C: + num: int + +def f(x): + match x: + case C(num=1 | 2): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12 :: object + r13 :: int32 + r14 :: bit + r15 :: bool + r16 :: str + r17 :: object + r18 :: str + r19 :: object + r20 :: object[1] + r21 :: object_ptr + r22, r23 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = 'num' + r5 = CPyObject_GetAttr(x, r4) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L4 else goto L2 :: bool +L2: + r11 = object 2 + r12 = PyObject_RichCompare(r5, r11, 2) + r13 = PyObject_IsTrue(r12) + r14 = r13 >= 0 :: signed + r15 = truncate r13: int32 to builtins.bool + if r15 goto L4 else goto L3 :: bool +L3: + goto L5 +L4: + r16 = 'matched' + r17 = builtins :: module + r18 = 'print' + r19 = CPyObject_GetAttr(r17, r18) + r20 = [r16] + r21 = load_address r20 + r22 = _PyObject_Vectorcall(r19, r21, 1, 0) + keep_alive r16 + goto L6 +L5: +L6: + r23 = box(None, 1) + return r23 +[case testAsPatternDoesntBleedIntoSubPatterns_python3_10] +class C: + __match_args__ = ("a", "b") + a: int + b: int + +def f(x): + match x: + case C(1, 2) as y: + print("matched") +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0, r1 :: str + r2 :: tuple[str, str] +L0: + r0 = 'a' + r1 = 'b' + r2 = (r0, r1) + __mypyc_self__.__match_args__ = r2 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4, y :: __main__.C + r5 :: str + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: str + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L5 :: bool +L1: + r4 = cast(__main__.C, x) + y = r4 +L2: + r5 = 'a' + r6 = CPyObject_GetAttr(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L5 :: bool +L3: + r12 = 'b' 
+ r13 = CPyObject_GetAttr(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L5 :: bool +L4: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L6 +L5: +L6: + r26 = box(None, 1) + return r26 +[case testMatchClassPatternPositionalCapture_python3_10] +class C: + __match_args__ = ("x",) + + x: int + +def f(x): + match x: + case C(num): + print("matched") +[out] +def C.__mypyc_defaults_setup(__mypyc_self__): + __mypyc_self__ :: __main__.C + r0 :: str + r1 :: tuple[str] +L0: + r0 = 'x' + r1 = (r0) + __mypyc_self__.__match_args__ = r1 + return 1 +def f(x): + x, r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, num :: int + r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = __main__.C :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'x' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + num = r6 +L2: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' + r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L4 +L3: +L4: + r14 = box(None, 1) + return r14 +[case testMatchMappingEmpty_python3_10] +def f(x): + match x: + case {}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: object + r4 :: str + r5 :: object + r6 :: object[1] + r7 :: object_ptr + r8, r9 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = 'matched' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = [r2] + r7 = load_address r6 + r8 = _PyObject_Vectorcall(r5, r7, 1, 0) + keep_alive r2 + goto L3 +L2: +L3: + r9 = box(None, 1) + return r9 +[case testMatchMappingPatternWithKeys_python3_10] +def f(x): + match x: + case {"key": "value"}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: str + r12 :: object + r13 :: str + r14 :: object + r15 :: object[1] + r16 :: object_ptr + r17, r18 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L4 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L4 :: bool +L3: + r11 = 'matched' + r12 = builtins :: module + r13 = 'print' + r14 = CPyObject_GetAttr(r12, r13) + r15 = [r11] + r16 = load_address r15 + r17 = _PyObject_Vectorcall(r14, r16, 1, 0) + keep_alive r11 + goto L5 +L4: +L5: + r18 = box(None, 1) + return r18 +[case testMatchMappingPatternWithRest_python3_10] +def f(x): + match x: + case {**rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2, rest :: dict + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = 
CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = CPyDict_FromAny(x) + rest = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchMappingPatternWithRestPopKeys_python3_10] +def f(x): + match x: + case {"key": "value", **rest}: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: str + r3 :: int32 + r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, rest :: dict + r12 :: int32 + r13 :: bit + r14 :: str + r15 :: object + r16 :: str + r17 :: object + r18 :: object[1] + r19 :: object_ptr + r20, r21 :: object +L0: + r0 = CPyMapping_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = 'key' + r3 = PyMapping_HasKey(x, r2) + r4 = r3 != 0 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PyObject_GetItem(x, r2) + r6 = 'value' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = CPyDict_FromAny(x) + rest = r11 + r12 = PyDict_DelItem(r11, r2) + r13 = r12 >= 0 :: signed +L4: + r14 = 'matched' + r15 = builtins :: module + r16 = 'print' + r17 = CPyObject_GetAttr(r15, r16) + r18 = [r14] + r19 = load_address r18 + r20 = _PyObject_Vectorcall(r17, r19, 1, 0) + keep_alive r14 + goto L6 +L5: +L6: + r21 = box(None, 1) + return r21 +[case testMatchEmptySequencePattern_python3_10] +def f(x): + match x: + case []: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: str + r6 :: object + r7 :: str + r8 :: object + r9 :: object[1] + r10 :: object_ptr + r11, r12 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L3 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 0 + if r4 goto L2 else goto L3 :: bool +L2: + r5 = 'matched' + r6 = builtins :: module + r7 = 'print' + r8 = CPyObject_GetAttr(r6, r7) + r9 = [r5] + r10 = load_address r9 + r11 = _PyObject_Vectorcall(r8, r10, 1, 0) + keep_alive r5 + goto L4 +L3: +L4: + r12 = box(None, 1) + return r12 +[case testMatchFixedLengthSequencePattern_python3_10] +def f(x): + match x: + case [1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 == 2 + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, 
r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case testMatchSequencePatternWithTrailingUnboundStar_python3_10] +def f(x): + match x: + case [1, 2, *_]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: str + r18 :: object + r19 :: str + r20 :: object + r21 :: object[1] + r22 :: object_ptr + r23, r24 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L5 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L5 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L5 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L5 :: bool +L4: + r17 = 'matched' + r18 = builtins :: module + r19 = 'print' + r20 = CPyObject_GetAttr(r18, r19) + r21 = [r17] + r22 = load_address r21 + r23 = _PyObject_Vectorcall(r20, r22, 1, 0) + keep_alive r17 + goto L6 +L5: +L6: + r24 = box(None, 1) + return r24 +[case testMatchSequencePatternWithTrailingBoundStar_python3_10] +def f(x): + match x: + case [1, 2, *rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5, r6, r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11, r12, r13 :: object + r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: native_int + r18, rest :: object + r19 :: str + r20 :: object + r21 :: str + r22 :: object + r23 :: object[1] + r24 :: object_ptr + r25, r26 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = object 1 + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = PySequence_GetItem(x, 1) + r12 = object 2 + r13 = PyObject_RichCompare(r11, r12, 2) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L4 else goto L6 :: bool +L4: + r17 = r2 - 0 + r18 = PySequence_GetSlice(x, 2, r17) + rest = r18 +L5: + r19 = 'matched' + r20 = builtins :: module + r21 = 'print' + r22 = CPyObject_GetAttr(r20, r21) + r23 = [r19] + r24 = load_address r23 + r25 = _PyObject_Vectorcall(r22, r24, 1, 0) + keep_alive r19 + goto L7 +L6: +L7: + r26 = box(None, 1) + return r26 +[case testMatchSequenceWithStarPatternInTheMiddle_python3_10] +def f(x): + match x: + case ["start", *rest, "end"]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: object + r6 :: str + r7 :: object + r8 :: int32 + r9 :: bit + r10 :: bool + r11 :: native_int + r12 :: object + r13 :: str + r14 :: object + r15 :: int32 + r16 :: bit + r17 :: bool + r18 :: native_int + r19, rest :: object + r20 :: str + r21 :: object + r22 :: str 
+ r23 :: object + r24 :: object[1] + r25 :: object_ptr + r26, r27 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = PySequence_GetItem(x, 0) + r6 = 'start' + r7 = PyObject_RichCompare(r5, r6, 2) + r8 = PyObject_IsTrue(r7) + r9 = r8 >= 0 :: signed + r10 = truncate r8: int32 to builtins.bool + if r10 goto L3 else goto L6 :: bool +L3: + r11 = r2 - 1 + r12 = PySequence_GetItem(x, r11) + r13 = 'end' + r14 = PyObject_RichCompare(r12, r13, 2) + r15 = PyObject_IsTrue(r14) + r16 = r15 >= 0 :: signed + r17 = truncate r15: int32 to builtins.bool + if r17 goto L4 else goto L6 :: bool +L4: + r18 = r2 - 1 + r19 = PySequence_GetSlice(x, 1, r18) + rest = r19 +L5: + r20 = 'matched' + r21 = builtins :: module + r22 = 'print' + r23 = CPyObject_GetAttr(r21, r22) + r24 = [r20] + r25 = load_address r24 + r26 = _PyObject_Vectorcall(r23, r25, 1, 0) + keep_alive r20 + goto L7 +L6: +L7: + r27 = box(None, 1) + return r27 +[case testMatchSequenceWithStarPatternAtTheStart_python3_10] +def f(x): + match x: + case [*rest, 1, 2]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, r7, r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool + r12 :: native_int + r13, r14, r15 :: object + r16 :: int32 + r17 :: bit + r18 :: bool + r19 :: native_int + r20, rest :: object + r21 :: str + r22 :: object + r23 :: str + r24 :: object + r25 :: object[1] + r26 :: object_ptr + r27, r28 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L6 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 2 :: signed + if r4 goto L2 else goto L6 :: bool +L2: + r5 = r2 - 2 + r6 = PySequence_GetItem(x, r5) + r7 = object 1 + r8 = PyObject_RichCompare(r6, r7, 2) + r9 = PyObject_IsTrue(r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + if r11 goto L3 else goto L6 :: bool +L3: + r12 = r2 - 1 + r13 = PySequence_GetItem(x, r12) + r14 = object 2 + r15 = PyObject_RichCompare(r13, r14, 2) + r16 = PyObject_IsTrue(r15) + r17 = r16 >= 0 :: signed + r18 = truncate r16: int32 to builtins.bool + if r18 goto L4 else goto L6 :: bool +L4: + r19 = r2 - 2 + r20 = PySequence_GetSlice(x, 0, r19) + rest = r20 +L5: + r21 = 'matched' + r22 = builtins :: module + r23 = 'print' + r24 = CPyObject_GetAttr(r22, r23) + r25 = [r21] + r26 = load_address r25 + r27 = _PyObject_Vectorcall(r24, r26, 1, 0) + keep_alive r21 + goto L7 +L6: +L7: + r28 = box(None, 1) + return r28 +[case testMatchBuiltinClassPattern_python3_10] +def f(x): + match x: + case int(y): + print("matched") +[out] +def f(x): + x, r0 :: object + r1 :: bool + r2, y :: int + r3 :: str + r4 :: object + r5 :: str + r6 :: object + r7 :: object[1] + r8 :: object_ptr + r9, r10 :: object +L0: + r0 = load_address PyLong_Type + r1 = CPy_TypeCheck(x, r0) + if r1 goto L1 else goto L3 :: bool +L1: + r2 = unbox(int, x) + y = r2 +L2: + r3 = 'matched' + r4 = builtins :: module + r5 = 'print' + r6 = CPyObject_GetAttr(r4, r5) + r7 = [r3] + r8 = load_address r7 + r9 = _PyObject_Vectorcall(r6, r8, 1, 0) + keep_alive r3 + goto L4 +L3: +L4: + r10 = box(None, 1) + return r10 +[case testMatchSequenceCaptureAll_python3_10] +def f(x): + match x: + case [*rest]: + print("matched") +[out] +def f(x): + x :: object + r0 :: int32 + r1 :: bit + r2 :: native_int + r3, r4 :: bit + r5 :: native_int + r6, rest :: object + 
r7 :: str + r8 :: object + r9 :: str + r10 :: object + r11 :: object[1] + r12 :: object_ptr + r13, r14 :: object +L0: + r0 = CPySequence_Check(x) + r1 = r0 != 0 + if r1 goto L1 else goto L4 :: bool +L1: + r2 = PyObject_Size(x) + r3 = r2 >= 0 :: signed + r4 = r2 >= 0 :: signed + if r4 goto L2 else goto L4 :: bool +L2: + r5 = r2 - 0 + r6 = PySequence_GetSlice(x, 0, r5) + rest = r6 +L3: + r7 = 'matched' + r8 = builtins :: module + r9 = 'print' + r10 = CPyObject_GetAttr(r8, r9) + r11 = [r7] + r12 = load_address r11 + r13 = _PyObject_Vectorcall(r10, r12, 1, 0) + keep_alive r7 + goto L5 +L4: +L5: + r14 = box(None, 1) + return r14 +[case testMatchTypeAnnotatedNativeClass_python3_10] +class A: + a: int + +def f(x: A | int) -> int: + match x: + case A(a=a): + return a + case int(): + return x +[out] +def f(x): + x :: union[__main__.A, int] + r0 :: object + r1 :: int32 + r2 :: bit + r3 :: bool + r4 :: str + r5 :: object + r6, a :: int + r7 :: object + r8 :: bool + r9 :: int +L0: + r0 = __main__.A :: type + r1 = PyObject_IsInstance(x, r0) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + if r3 goto L1 else goto L3 :: bool +L1: + r4 = 'a' + r5 = CPyObject_GetAttr(x, r4) + r6 = unbox(int, r5) + a = r6 +L2: + return a +L3: + r7 = load_address PyLong_Type + r8 = CPy_TypeCheck(x, r7) + if r8 goto L4 else goto L5 :: bool +L4: + r9 = unbox(int, x) + return r9 +L5: +L6: + unreachable diff --git a/mypyc/test-data/irbuild-optional.test b/mypyc/test-data/irbuild-optional.test index 4b1d3d1ffec2..e98cf1b19e2e 100644 --- a/mypyc/test-data/irbuild-optional.test +++ b/mypyc/test-data/irbuild-optional.test @@ -527,14 +527,10 @@ class B: [out] def f(o): - o :: union[object, object] - r0 :: object - r1 :: str - r2, r3 :: object + o :: object + r0 :: str + r1 :: object L0: - r0 = o - r1 = 'x' - r2 = CPyObject_GetAttr(r0, r1) - r3 = r2 -L1: + r0 = 'x' + r1 = CPyObject_GetAttr(o, r0) return 1 diff --git a/mypyc/test-data/irbuild-set.test b/mypyc/test-data/irbuild-set.test index fec76751c915..c567422abac7 100644 --- a/mypyc/test-data/irbuild-set.test +++ b/mypyc/test-data/irbuild-set.test @@ -655,3 +655,185 @@ L0: r12 = PySet_Add(r0, r11) r13 = r12 >= 0 :: signed return r0 + +[case testOperatorInSetLiteral] +from typing_extensions import Final + +CONST: Final = "daylily" +non_const = 10 + +def precomputed(i: object) -> bool: + return i in {1, 2.0, 1 +2, 4j, "foo", b"bar", CONST, (None, (27,)), (), False} +def not_precomputed_non_final_name(i: int) -> bool: + return i in {non_const} +def not_precomputed_nested_set(i: int) -> bool: + return i in {frozenset({1}), 2} +[out] +def precomputed(i): + i :: object + r0 :: set + r1 :: int32 + r2 :: bit + r3 :: bool +L0: + r0 = frozenset({(), (None, (27,)), 1, 2.0, 3, 4j, False, b'bar', 'daylily', 'foo'}) + r1 = PySet_Contains(r0, i) + r2 = r1 >= 0 :: signed + r3 = truncate r1: int32 to builtins.bool + return r3 +def not_precomputed_non_final_name(i): + i :: int + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: bool +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = box(int, i) + r9 = PySet_Contains(r4, r8) + r10 = r9 >= 0 :: signed + r11 = truncate r9: int32 to builtins.bool + return r11 +def not_precomputed_nested_set(i): + i :: int + r0 :: set + r1 :: object + r2 :: int32 + r3 :: bit + r4 :: object + r5 
:: set + r6 :: int32 + r7 :: bit + r8 :: object + r9 :: int32 + r10 :: bit + r11 :: object + r12 :: int32 + r13 :: bit + r14 :: bool +L0: + r0 = PySet_New(0) + r1 = object 1 + r2 = PySet_Add(r0, r1) + r3 = r2 >= 0 :: signed + r4 = PyFrozenSet_New(r0) + r5 = PySet_New(0) + r6 = PySet_Add(r5, r4) + r7 = r6 >= 0 :: signed + r8 = object 2 + r9 = PySet_Add(r5, r8) + r10 = r9 >= 0 :: signed + r11 = box(int, i) + r12 = PySet_Contains(r5, r11) + r13 = r12 >= 0 :: signed + r14 = truncate r12: int32 to builtins.bool + return r14 + +[case testForSetLiteral] +from typing_extensions import Final + +CONST: Final = 10 +non_const = 20 + +def precomputed() -> None: + for _ in {"None", "True", "False"}: + pass + +def precomputed2() -> None: + for _ in {None, False, 1, 2.0, "4", b"5", (6,), 7j, CONST, CONST + 1}: + pass + +def not_precomputed() -> None: + for not_optimized in {non_const}: + pass + +[out] +def precomputed(): + r0 :: set + r1, r2 :: object + r3 :: str + _ :: object + r4 :: bit +L0: + r0 = frozenset({'False', 'None', 'True'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + r3 = cast(str, r2) + _ = r3 +L3: + goto L1 +L4: + r4 = CPy_NoErrOccured() +L5: + return 1 +def precomputed2(): + r0 :: set + r1, r2, _ :: object + r3 :: bit +L0: + r0 = frozenset({(6,), 1, 10, 11, 2.0, '4', 7j, False, None, b'5'}) + r1 = PyObject_GetIter(r0) +L1: + r2 = PyIter_Next(r1) + if is_error(r2) goto L4 else goto L2 +L2: + _ = r2 +L3: + goto L1 +L4: + r3 = CPy_NoErrOccured() +L5: + return 1 +def not_precomputed(): + r0 :: dict + r1 :: str + r2 :: object + r3 :: int + r4 :: set + r5 :: object + r6 :: int32 + r7 :: bit + r8, r9 :: object + r10, not_optimized :: int + r11 :: bit +L0: + r0 = __main__.globals :: static + r1 = 'non_const' + r2 = CPyDict_GetItem(r0, r1) + r3 = unbox(int, r2) + r4 = PySet_New(0) + r5 = box(int, r3) + r6 = PySet_Add(r4, r5) + r7 = r6 >= 0 :: signed + r8 = PyObject_GetIter(r4) +L1: + r9 = PyIter_Next(r8) + if is_error(r9) goto L4 else goto L2 +L2: + r10 = unbox(int, r9) + not_optimized = r10 +L3: + goto L1 +L4: + r11 = CPy_NoErrOccured() +L5: + return 1 + diff --git a/mypyc/test-data/irbuild-statements.test b/mypyc/test-data/irbuild-statements.test index ab947c956b74..090c7ed9f3df 100644 --- a/mypyc/test-data/irbuild-statements.test +++ b/mypyc/test-data/irbuild-statements.test @@ -1006,9 +1006,9 @@ L5: return 1 [case testForZip] -from typing import List, Iterable +from typing import List, Iterable, Sequence -def f(a: List[int], b: Iterable[bool]) -> None: +def f(a: List[int], b: Sequence[bool]) -> None: for x, y in zip(a, b): if b: x = 1 diff --git a/mypyc/test-data/irbuild-try.test b/mypyc/test-data/irbuild-try.test index d1119c5deefd..faf3fa1dbd2f 100644 --- a/mypyc/test-data/irbuild-try.test +++ b/mypyc/test-data/irbuild-try.test @@ -416,3 +416,108 @@ L19: L20: return 1 +[case testWithNativeSimple] +class DummyContext: + def __enter__(self) -> None: + pass + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + pass + +def foo(x: DummyContext) -> None: + with x: + print('hello') +[out] +def DummyContext.__enter__(self): + self :: __main__.DummyContext +L0: + return 1 +def DummyContext.__exit__(self, exc_type, exc_val, exc_tb): + self :: __main__.DummyContext + exc_type, exc_val, exc_tb :: object +L0: + return 1 +def foo(x): + x :: __main__.DummyContext + r0 :: None + r1 :: bool + r2 :: str + r3 :: object + r4 :: str + r5, r6 :: object + r7, r8 :: tuple[object, object, object] + r9, r10, r11 :: object + r12 :: None + r13 :: object + 
r14 :: int32 + r15 :: bit + r16 :: bool + r17 :: bit + r18, r19, r20 :: tuple[object, object, object] + r21 :: object + r22 :: None + r23 :: bit +L0: + r0 = x.__enter__() + r1 = 1 +L1: +L2: + r2 = 'hello' + r3 = builtins :: module + r4 = 'print' + r5 = CPyObject_GetAttr(r3, r4) + r6 = PyObject_CallFunctionObjArgs(r5, r2, 0) + goto L8 +L3: (handler for L2) + r7 = CPy_CatchError() + r1 = 0 + r8 = CPy_GetExcInfo() + r9 = r8[0] + r10 = r8[1] + r11 = r8[2] + r12 = x.__exit__(r9, r10, r11) + r13 = box(None, r12) + r14 = PyObject_IsTrue(r13) + r15 = r14 >= 0 :: signed + r16 = truncate r14: int32 to builtins.bool + if r16 goto L5 else goto L4 :: bool +L4: + CPy_Reraise() + unreachable +L5: +L6: + CPy_RestoreExcInfo(r7) + goto L8 +L7: (handler for L3, L4, L5) + CPy_RestoreExcInfo(r7) + r17 = CPy_KeepPropagating() + unreachable +L8: +L9: +L10: + r18 = :: tuple[object, object, object] + r19 = r18 + goto L12 +L11: (handler for L1, L6, L7, L8) + r20 = CPy_CatchError() + r19 = r20 +L12: + if r1 goto L13 else goto L14 :: bool +L13: + r21 = load_address _Py_NoneStruct + r22 = x.__exit__(r21, r21, r21) +L14: + if is_error(r19) goto L16 else goto L15 +L15: + CPy_Reraise() + unreachable +L16: + goto L20 +L17: (handler for L12, L13, L14, L15) + if is_error(r19) goto L19 else goto L18 +L18: + CPy_RestoreExcInfo(r19) +L19: + r23 = CPy_KeepPropagating() + unreachable +L20: + return 1 diff --git a/mypyc/test-data/refcount.test b/mypyc/test-data/refcount.test index ce365fc50e7e..372956a00cab 100644 --- a/mypyc/test-data/refcount.test +++ b/mypyc/test-data/refcount.test @@ -1490,3 +1490,39 @@ L0: r2 = CPyTagged_Subtract(r0, r1) c.x = r2; r3 = is_error return 1 + +[case testCoerceIntToI64_64bit] +from mypy_extensions import i64 + +def f(x: int) -> i64: + # TODO: On the fast path we shouldn't have a decref. Once we have high-level IR, + # coercion from int to i64 can be a single op, which makes it easier to + # generate optimal refcount handling for this case. + return x + 1 +[out] +def f(x): + x, r0 :: int + r1 :: native_int + r2 :: bit + r3, r4 :: int64 + r5 :: ptr + r6 :: c_ptr + r7 :: int64 +L0: + r0 = CPyTagged_Add(x, 2) + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = r0 >> 1 + dec_ref r0 :: int + r4 = r3 + goto L3 +L2: + r5 = r0 ^ 1 + r6 = r5 + r7 = CPyLong_AsInt64(r6) + r4 = r7 + dec_ref r0 :: int +L3: + return r4 diff --git a/mypyc/test-data/run-async.test b/mypyc/test-data/run-async.test index e664ed3bb55a..85ad172d61df 100644 --- a/mypyc/test-data/run-async.test +++ b/mypyc/test-data/run-async.test @@ -13,6 +13,9 @@ async def g() -> int: async def f() -> int: return await g() +[file asyncio/__init__.pyi] +async def sleep(t: float) -> None: ... 
+ [typing fixtures/typing-full.pyi] [file driver.py] diff --git a/mypyc/test-data/run-bools.test b/mypyc/test-data/run-bools.test index a7afc5f2b1a2..522296592c54 100644 --- a/mypyc/test-data/run-bools.test +++ b/mypyc/test-data/run-bools.test @@ -15,6 +15,11 @@ True False [case testBoolOps] +from typing import Optional, Any +MYPY = False +if MYPY: + from mypy_extensions import i64 + def f(x: bool) -> bool: if x: return False @@ -27,8 +32,8 @@ def test_if() -> None: def test_bitwise_and() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t & t == True assert t & f == False assert f & t == False @@ -40,8 +45,8 @@ def test_bitwise_and() -> None: def test_bitwise_or() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t | t == True assert t | f == True assert f | t == True @@ -53,8 +58,8 @@ def test_bitwise_or() -> None: def test_bitwise_xor() -> None: # Use eval() to avoid constand folding - t = eval('True') # type: bool - f = eval('False') # type: bool + t: bool = eval('True') + f: bool = eval('False') assert t ^ t == False assert t ^ f == True assert f ^ t == True @@ -66,7 +71,6 @@ def test_bitwise_xor() -> None: f ^= f assert f == False -[case testIsinstanceBool] def test_isinstance_bool() -> None: a = True b = 1.0 @@ -76,3 +80,144 @@ def test_isinstance_bool() -> None: assert isinstance(b, bool) == False assert isinstance(c, bool) == False assert isinstance(d, bool) == True + +class C: pass +class D: + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +class E: pass +class F(E): + def __init__(self, b: bool) -> None: + self.b = b + + def __bool__(self) -> bool: + return self.b + +def optional_to_bool1(o: Optional[C]) -> bool: + return bool(o) + +def optional_to_bool2(o: Optional[D]) -> bool: + return bool(o) + +def optional_to_bool3(o: Optional[E]) -> bool: + return bool(o) + +def test_optional_to_bool() -> None: + assert not optional_to_bool1(None) + assert optional_to_bool1(C()) + assert not optional_to_bool2(None) + assert not optional_to_bool2(D(False)) + assert optional_to_bool2(D(True)) + assert not optional_to_bool3(None) + assert optional_to_bool3(E()) + assert not optional_to_bool3(F(False)) + assert optional_to_bool3(F(True)) + +def test_any_to_bool() -> None: + a: Any = int() + b: Any = a + 1 + assert not bool(a) + assert bool(b) + +def eq(x: bool, y: bool) -> bool: + return x == y + +def ne(x: bool, y: bool) -> bool: + return x != y + +def lt(x: bool, y: bool) -> bool: + return x < y + +def le(x: bool, y: bool) -> bool: + return x <= y + +def gt(x: bool, y: bool) -> bool: + return x > y + +def ge(x: bool, y: bool) -> bool: + return x >= y + +def test_comparisons() -> None: + for x in True, False: + for y in True, False: + x2: Any = x + y2: Any = y + assert eq(x, y) == (x2 == y2) + assert ne(x, y) == (x2 != y2) + assert lt(x, y) == (x2 < y2) + assert le(x, y) == (x2 <= y2) + assert gt(x, y) == (x2 > y2) + assert ge(x, y) == (x2 >= y2) + +def eq_mixed(x: bool, y: int) -> bool: + return x == y + +def neq_mixed(x: int, y: bool) -> bool: + return x != y + +def lt_mixed(x: bool, y: int) -> bool: + return x < y + +def gt_mixed(x: int, y: bool) -> bool: + return x > y + +def test_mixed_comparisons() -> None: + for x in True, False: + for n in -(1 << 70), -123, 0, 1, 1753, 1 << 70: + assert 
eq_mixed(x, n) == (int(x) == n) + assert neq_mixed(n, x) == (n != int(x)) + assert lt_mixed(x, n) == (int(x) < n) + assert gt_mixed(n, x) == (n > int(x)) + +def add(x: bool, y: bool) -> int: + return x + y + +def add_mixed(b: bool, n: int) -> int: + return b + n + +def sub_mixed(n: int, b: bool) -> int: + return n - b + +def test_arithmetic() -> None: + for x in True, False: + for y in True, False: + assert add(x, y) == int(x) + int(y) + for n in -(1 << 70), -123, 0, 1, 1753, 1 << 70: + assert add_mixed(x, n) == int(x) + n + assert sub_mixed(n, x) == n - int(x) + +def add_mixed_i64(b: bool, n: i64) -> i64: + return b + n + +def sub_mixed_i64(n: i64, b: bool) -> i64: + return n - b + +def test_arithmetic_i64() -> None: + for x in True, False: + for n in -(1 << 62), -123, 0, 1, 1753, 1 << 62: + assert add_mixed_i64(x, n) == int(x) + n + assert sub_mixed_i64(n, x) == n - int(x) + +def eq_mixed_i64(x: bool, y: i64) -> bool: + return x == y + +def neq_mixed_i64(x: i64, y: bool) -> bool: + return x != y + +def lt_mixed_i64(x: bool, y: i64) -> bool: + return x < y + +def gt_mixed_i64(x: i64, y: bool) -> bool: + return x > y + +def test_mixed_comparisons_i64() -> None: + for x in True, False: + for n in -(1 << 62), -123, 0, 1, 1753, 1 << 62: + assert eq_mixed_i64(x, n) == (int(x) == n) + assert neq_mixed_i64(n, x) == (n != int(x)) + assert lt_mixed_i64(x, n) == (int(x) < n) + assert gt_mixed_i64(n, x) == (n > int(x)) diff --git a/mypyc/test-data/run-classes.test b/mypyc/test-data/run-classes.test index 0ed7b2c7fd2d..92ec3873bf38 100644 --- a/mypyc/test-data/run-classes.test +++ b/mypyc/test-data/run-classes.test @@ -263,6 +263,16 @@ class TestEnum(Enum): assert TestEnum.test() == 3 +import enum + +class Pokemon(enum.Enum): + magikarp = 1 + squirtle = 2 + slowbro = 3 + +assert Pokemon.magikarp.value == 1 +assert Pokemon.squirtle.name == 'squirtle' + [file other.py] # Force a multi-module test to make sure we can compile multi-file with # non-extension classes @@ -1774,6 +1784,36 @@ Represents a sequence of values. Updates itself by next, which is a new value. Represents a sequence of values. Updates itself by next, which is a new value. 3 3 +[out version>=3.11] +Traceback (most recent call last): + File "driver.py", line 5, in + print (x.rankine) + ^^^^^^^^^ + File "native.py", line 16, in rankine + raise NotImplementedError +NotImplementedError +0.0 +F: 32.0 C: 0.0 +100.0 +F: 212.0 C: 100.0 +1 +2 +3 +4 + [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26] + [7, 22, 11, 34, 17, 52, 26, 13, 40, 20, 10, 5, 16, 8, 4, 2, 1, 4, 2, 1] + [7, 11, 17, 26, 40, 10, 16, 4, 1, 2, 4, 1, 2, 4, 1, 2, 4, 1, 2, 4] +10 +34 +26 + [7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26] + [7, 22, 11, 34, 17, 52, 26, 13, 40, 20, 10, 5, 16, 8, 4, 2, 1, 4, 2, 1] + [7, 11, 17, 26, 40, 10, 16, 4, 1, 2, 4, 1, 2, 4, 1, 2, 4, 1, 2, 4] +Represents a sequence of values. Updates itself by next, which is a new value. +Represents a sequence of values. Updates itself by next, which is a new value. +Represents a sequence of values. Updates itself by next, which is a new value. +3 +3 [case testPropertySetters] @@ -1928,6 +1968,188 @@ import other_interpreted [out] +[case testAttributeOverridesProperty] +from typing import Any +from mypy_extensions import trait + +@trait +class T1: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... 
+ +class C1(T1): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C1() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T1 = C1() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class B2: + @property + def x(self) -> int: + return 11 + + @property + def y(self) -> int: + return 25 + +class C2(B2): + x: int = 1 + y: int = 4 + +def test_read_only_property_in_class_implemented_as_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + b: B2 = C2() + assert b.y == 4 + b = c + assert b.x == 5 + assert b.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T3: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + +class B3: + x: int = 1 + y: int = 4 + +class C3(B3, T3): + pass + +def test_read_only_property_implemented_as_attribute_indirectly() -> None: + c = C3() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T3 = C3() + assert t.y == 4 + t = c + assert t.x == 5 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T4: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... + +class C4(T4): + x: int = 1 + y: int = 4 + +def test_read_write_property_implemented_as_attribute() -> None: + c = C4() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T4 = C4() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +@trait +class T5: + @property + def x(self) -> int: ... + @x.setter + def x(self, v1: int) -> None: ... + + @property + def y(self) -> int: ... + @y.setter + def y(self, v2: int) -> None: ... 
+ +class B5: + x: int = 1 + y: int = 4 + +class BB5(B5): + pass + +class C5(BB5, T5): + pass + +def test_read_write_property_indirectly_implemented_as_attribute() -> None: + c = C5() + c.x = 5 + assert c.x == 5 + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T5 = C5() + assert t.y == 4 + t.x = 5 + assert t.x == 5 + t.y = 6 + assert t.y == 6 + a: Any = c + assert a.x == 5 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + [case testSubclassAttributeAccess] from mypy_extensions import trait @@ -2216,3 +2438,14 @@ class Derived(Base): pass assert Derived()() == 1 + +[case testClassWithFinalAttribute] +from typing_extensions import Final + +class C: + A: Final = -1 + a: Final = [A] + +def test_final_attribute() -> None: + assert C.A == -1 + assert C.a == [-1] diff --git a/mypyc/test-data/run-dunders.test b/mypyc/test-data/run-dunders.test index aee2a956c47f..2845187de2c3 100644 --- a/mypyc/test-data/run-dunders.test +++ b/mypyc/test-data/run-dunders.test @@ -332,6 +332,13 @@ class C: def __float__(self) -> float: return float(self.x + 4) + def __pos__(self) -> int: + return self.x + 5 + + def __abs__(self) -> int: + return abs(self.x) + 6 + + def test_unary_dunders_generic() -> None: a: Any = C(10) @@ -339,6 +346,8 @@ def test_unary_dunders_generic() -> None: assert ~a == 12 assert int(a) == 13 assert float(a) == 14.0 + assert +a == 15 + assert abs(a) == 16 def test_unary_dunders_native() -> None: c = C(10) @@ -347,6 +356,8 @@ def test_unary_dunders_native() -> None: assert ~c == 12 assert int(c) == 13 assert float(c) == 14.0 + assert +c == 15 + assert abs(c) == 16 [case testDundersBinarySimple] from typing import Any @@ -391,6 +402,12 @@ class C: def __floordiv__(self, y: int) -> int: return self.x + y + 30 + def __divmod__(self, y: int) -> int: + return self.x + y + 40 + + def __pow__(self, y: int) -> int: + return self.x + y + 50 + def test_generic() -> None: a: Any = C() assert a + 3 == 8 @@ -406,11 +423,15 @@ def test_generic() -> None: assert a @ 3 == 18 assert a / 2 == 27 assert a // 2 == 37 + assert divmod(a, 2) == 47 + assert a ** 2 == 57 def test_native() -> None: c = C() assert c + 3 == 8 assert c - 3 == 2 + assert divmod(c, 3) == 48 + assert c ** 3 == 58 def test_error() -> None: a: Any = C() @@ -426,6 +447,12 @@ def test_error() -> None: assert str(e) == "unsupported operand type(s) for -: 'C' and 'str'" else: assert False + try: + a ** 'x' + except TypeError as e: + assert str(e) == "unsupported operand type(s) for **: 'C' and 'str'" + else: + assert False [case testDundersBinaryReverse] from typing import Any @@ -446,12 +473,20 @@ class C: def __rsub__(self, y: int) -> int: return self.x - y - 1 + def __pow__(self, y: int) -> int: + return self.x**y + + def __rpow__(self, y: int) -> int: + return self.x**y + 1 + def test_generic() -> None: a: Any = C() assert a + 3 == 8 assert 4 + a == 10 assert a - 3 == 2 assert 4 - a == 0 + assert a**3 == 125 + assert 4**a == 626 def test_native() -> None: c = C() @@ -459,6 +494,8 @@ def test_native() -> None: assert 4 + c == 10 assert c - 3 == 2 assert 4 - c == 0 + assert c**3 == 125 + assert 4**c == 626 def test_errors() -> None: a: Any = C() @@ -481,20 +518,37 @@ def test_errors() -> None: 'must be str, not C') else: assert False + try: + 'x' ** a + except TypeError as e: + assert str(e) == "unsupported operand type(s) for ** or pow(): 'str' and 'C'" + else: + assert False + class F: def __add__(self, x: int) -> int: return 5 + def __pow__(self, x: int) -> int: + return -5 + class G: def __add__(self, x: int) -> 
int: return 33 + def __pow__(self, x: int) -> int: + return -33 + def __radd__(self, x: F) -> int: return 6 + def __rpow__(self, x: F) -> int: + return -6 + def test_type_mismatch_fall_back_to_reverse() -> None: assert F() + G() == 6 + assert F()**G() == -6 [case testDundersBinaryNotImplemented] from typing import Any, Union @@ -702,6 +756,10 @@ class C: self.x += y + 5 return self + def __ipow__(self, y: int, __mod_throwaway: None = None) -> C: + self.x **= y + return self + def test_generic_1() -> None: c: Any = C() c += 3 @@ -716,6 +774,8 @@ def test_generic_1() -> None: assert c.x == 16 c //= 4 assert c.x == 40 + c **= 2 + assert c.x == 1600 def test_generic_2() -> None: c: Any = C() @@ -740,6 +800,8 @@ def test_native() -> None: assert c.x == 3 c *= 3 assert c.x == 9 + c **= 2 + assert c.x == 81 def test_error() -> None: c: Any = C() @@ -796,3 +858,88 @@ def test_dunder_min() -> None: assert max(y2, x2).val == 'xxx' assert min(y2, z2).val == 'zzz' assert max(x2, z2).val == 'zzz' + + +[case testDundersPowerSpecial] +import sys +from typing import Any, Optional +from testutil import assertRaises + +class Forward: + def __pow__(self, exp: int, mod: Optional[int] = None) -> int: + if mod is None: + return 2**exp + else: + return 2**exp % mod + +class ForwardModRequired: + def __pow__(self, exp: int, mod: int) -> int: + return 2**exp % mod + +class ForwardNotImplemented: + def __pow__(self, exp: int, mod: Optional[object] = None) -> Any: + return NotImplemented + +class Reverse: + def __rpow__(self, exp: int) -> int: + return 2**exp + 1 + +class Both: + def __pow__(self, exp: int, mod: Optional[int] = None) -> int: + if mod is None: + return 2**exp + else: + return 2**exp % mod + + def __rpow__(self, exp: int) -> int: + return 2**exp + 1 + +class Child(ForwardNotImplemented): + def __rpow__(self, exp: object) -> int: + return 50 + +class Inplace: + value = 2 + + def __ipow__(self, exp: int, mod: Optional[int] = None) -> "Inplace": + self.value **= exp - (mod or 0) + return self + +def test_native() -> None: + f = Forward() + assert f**3 == 8 + assert pow(f, 3) == 8 + assert pow(f, 3, 3) == 2 + assert pow(ForwardModRequired(), 3, 3) == 2 + b = Both() + assert b**3 == 8 + assert 3**b == 9 + assert pow(b, 3) == 8 + assert pow(b, 3, 3) == 2 + i = Inplace() + i **= 2 + assert i.value == 4 + +def test_errors() -> None: + if sys.version_info[0] >= 3 and sys.version_info[1] >= 10: + op = "** or pow()" + else: + op = "pow()" + + f = Forward() + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'Forward', 'int', 'str'"): + pow(f, 3, "x") # type: ignore + with assertRaises(TypeError, "unsupported operand type(s) for **: 'Forward' and 'str'"): + f**"x" # type: ignore + r = Reverse() + with assertRaises(TypeError, "unsupported operand type(s) for ** or pow(): 'str' and 'Reverse'"): + "x"**r # type: ignore + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'int', 'Reverse', 'int'"): + # Ternary pow() does not fallback to __rpow__ if LHS's __pow__ returns NotImplemented. + pow(3, r, 3) # type: ignore + with assertRaises(TypeError, f"unsupported operand type(s) for {op}: 'ForwardNotImplemented', 'Child', 'int'"): + # Ternary pow() does not try RHS's __rpow__ first when it's a subclass and redefines + # __rpow__ unlike other ops. 
+ pow(ForwardNotImplemented(), Child(), 3) # type: ignore + with assertRaises(TypeError, "unsupported operand type(s) for ** or pow(): 'ForwardModRequired' and 'int'"): + ForwardModRequired()**3 # type: ignore diff --git a/mypyc/test-data/run-functions.test b/mypyc/test-data/run-functions.test index b6277c9e8ec4..21993891c4e3 100644 --- a/mypyc/test-data/run-functions.test +++ b/mypyc/test-data/run-functions.test @@ -430,9 +430,11 @@ def nested_funcs(n: int) -> List[Callable[..., Any]]: ls.append(f) return ls +def bool_default(x: bool = False, y: bool = True) -> str: + return str(x) + '-' + str(y) [file driver.py] -from native import f, g, h, same, nested_funcs, a_lambda +from native import f, g, h, same, nested_funcs, a_lambda, bool_default g() assert f(2) == (5, "test") assert f(s = "123", x = -2) == (1, "123") @@ -447,6 +449,10 @@ assert [f() for f in nested_funcs(10)] == list(range(10)) assert a_lambda(10) == 10 assert a_lambda() == 20 +assert bool_default() == 'False-True' +assert bool_default(True) == 'True-True' +assert bool_default(True, False) == 'True-False' + [case testMethodCallWithDefaultArgs] from typing import Tuple, List class A: @@ -1235,3 +1241,18 @@ def g() -> None: a.pop() g() + +[case testUnpackKwargsCompiled] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +def foo(**kwargs: Unpack[Person]) -> None: + print(kwargs["name"]) + +# This is not really supported yet, just test that we behave reasonably. +foo(name='Jennifer', age=38) +[out] +Jennifer diff --git a/mypyc/test-data/run-generators.test b/mypyc/test-data/run-generators.test index db658eea6504..bcf9da1846ae 100644 --- a/mypyc/test-data/run-generators.test +++ b/mypyc/test-data/run-generators.test @@ -650,3 +650,32 @@ from testutil import run_generator yields, val = run_generator(finally_yield()) assert yields == ('x',) assert val == 'test', val + +[case testUnreachableComprehensionNoCrash] +from typing import List + +def list_comp() -> List[int]: + if True: + return [5] + return [i for i in [5]] + +[file driver.py] +from native import list_comp +assert list_comp() == [5] + +[case testWithNative] +class DummyContext: + def __init__(self) -> None: + self.x = 0 + + def __enter__(self) -> None: + self.x += 1 + + def __exit__(self, exc_type, exc_value, exc_tb) -> None: + self.x -= 1 + +def test_basic() -> None: + context = DummyContext() + with context: + assert context.x == 1 + assert context.x == 0 diff --git a/mypyc/test-data/run-i32.test b/mypyc/test-data/run-i32.test new file mode 100644 index 000000000000..af99fb79d35e --- /dev/null +++ b/mypyc/test-data/run-i32.test @@ -0,0 +1,332 @@ +[case testI32BasicOps] +from typing import Any, Tuple + +from mypy_extensions import i32, i64 + +from testutil import assertRaises + +def test_box_and_unbox() -> None: + values = (list(range(-2**31, -2**31 + 100)) + + list(range(-1000, 1000)) + + list(range(2**31 - 100, 2**31))) + for i in values: + o: Any = i + x: i32 = o + o2: Any = x + assert o == o2 + assert x == i + with assertRaises(OverflowError, "int too large to convert to i32"): + o = 2**31 + x2: i32 = o + with assertRaises(OverflowError, "int too large to convert to i32"): + o = -2**32 - 1 + x3: i32 = o + +def div_by_7(x: i32) -> i32: + return x // 7 +def div_by_neg_7(x: i32) -> i32: + return x // -7 + +def div(x: i32, y: i32) -> i32: + return x // y + +def test_divide_by_constant() -> None: + for i in range(-1000, 1000): + assert div_by_7(i) == i // 7 + for i in range(-2**31, -2**31 + 1000): + assert div_by_7(i) == i 
// 7 + for i in range(2**31 - 1000, 2**31): + assert div_by_7(i) == i // 7 + +def test_divide_by_negative_constant() -> None: + for i in range(-1000, 1000): + assert div_by_neg_7(i) == i // -7 + for i in range(-2**31, -2**31 + 1000): + assert div_by_neg_7(i) == i // -7 + for i in range(2**31 - 1000, 2**31): + assert div_by_neg_7(i) == i // -7 + +def test_divide_by_variable() -> None: + values = (list(range(-50, 50)) + + list(range(-2**31, -2**31 + 10)) + + list(range(2**31 - 10, 2**31))) + for x in values: + for y in values: + if y != 0: + if x // y == 2**31: + with assertRaises(OverflowError, "integer division overflow"): + div(x, y) + else: + assert div(x, y) == x // y + else: + with assertRaises(ZeroDivisionError, "integer division or modulo by zero"): + div(x, y) + +def mod_by_7(x: i32) -> i32: + return x % 7 + +def mod_by_neg_7(x: i32) -> i32: + return x // -7 + +def mod(x: i32, y: i32) -> i32: + return x % y + +def test_mod_by_constant() -> None: + for i in range(-1000, 1000): + assert mod_by_7(i) == i % 7 + for i in range(-2**31, -2**31 + 1000): + assert mod_by_7(i) == i % 7 + for i in range(2**31 - 1000, 2**31): + assert mod_by_7(i) == i % 7 + +def test_mod_by_negative_constant() -> None: + for i in range(-1000, 1000): + assert mod_by_neg_7(i) == i // -7 + for i in range(-2**31, -2**31 + 1000): + assert mod_by_neg_7(i) == i // -7 + for i in range(2**31 - 1000, 2**31): + assert mod_by_neg_7(i) == i // -7 + +def test_mod_by_variable() -> None: + values = (list(range(-50, 50)) + + list(range(-2**31, -2**31 + 10)) + + list(range(2**31 - 10, 2**31))) + for x in values: + for y in values: + if y != 0: + assert mod(x, y) == x % y + else: + with assertRaises(ZeroDivisionError, "integer division or modulo by zero"): + mod(x, y) + +def test_simple_arithmetic_ops() -> None: + zero: i32 = int() + one: i32 = zero + 1 + two: i32 = one + 1 + neg_one: i32 = -one + assert one + one == 2 + assert one + two == 3 + assert one + neg_one == 0 + assert one - one == 0 + assert one - two == -1 + assert one * one == 1 + assert one * two == 2 + assert two * two == 4 + assert two * neg_one == -2 + assert neg_one * one == -1 + assert neg_one * neg_one == 1 + assert two * 0 == 0 + assert 0 * two == 0 + assert -one == -1 + assert -two == -2 + assert -neg_one == 1 + assert -zero == 0 + +def test_bitwise_ops() -> None: + x: i32 = 1920687484 + int() + y: i32 = 383354614 + int() + z: i32 = -1879040563 + int() + zero: i32 = int() + one: i32 = zero + 1 + two: i32 = zero + 2 + neg_one: i32 = -one + + assert x & y == 307823732 + assert x & z == 268442956 + assert z & z == z + assert x & zero == 0 + + assert x | y == 1996218366 + assert x | z == -226796035 + assert z | z == z + assert x | 0 == x + + assert x ^ y == 1688394634 + assert x ^ z == -495238991 + assert z ^ z == 0 + assert z ^ 0 == z + + assert x << one == -453592328 + assert x << two == -907184656 + assert z << two == 1073772340 + assert z << 0 == z + + assert x >> one == 960343742 + assert x >> two == 480171871 + assert z >> two == -469760141 + assert z >> 0 == z + + assert ~x == -1920687485 + assert ~z == 1879040562 + assert ~zero == -1 + assert ~neg_one == 0 + +def eq(x: i32, y: i32) -> bool: + return x == y + +def test_eq() -> None: + assert eq(int(), int()) + assert eq(5 + int(), 5 + int()) + assert eq(-5 + int(), -5 + int()) + assert not eq(int(), 1 + int()) + assert not eq(5 + int(), 6 + int()) + assert not eq(-5 + int(), -6 + int()) + assert not eq(-5 + int(), 5 + int()) + +def test_comparisons() -> None: + one: i32 = 1 + int() + one2: i32 = 1 + int() 
+ two: i32 = 2 + int() + assert one < two + assert not (one < one2) + assert not (two < one) + assert two > one + assert not (one > one2) + assert not (one > two) + assert one <= two + assert one <= one2 + assert not (two <= one) + assert two >= one + assert one >= one2 + assert not (one >= two) + assert one == one2 + assert not (one == two) + assert one != two + assert not (one != one2) + +def test_mixed_comparisons() -> None: + i32_3: i32 = int() + 3 + int_5 = int() + 5 + assert i32_3 < int_5 + assert int_5 > i32_3 + b = i32_3 > int_5 + assert not b + + int_largest = int() + (1 << 31) - 1 + assert int_largest > i32_3 + int_smallest = int() - (1 << 31) + assert i32_3 > int_smallest + + int_too_big = int() + (1 << 31) + int_too_small = int() - (1 << 31) - 1 + with assertRaises(OverflowError): + assert i32_3 < int_too_big + with assertRaises(OverflowError): + assert int_too_big < i32_3 + with assertRaises(OverflowError): + assert i32_3 > int_too_small + with assertRaises(OverflowError): + assert int_too_small < i32_3 + +def test_mixed_arithmetic_and_bitwise_ops() -> None: + i32_3: i32 = int() + 3 + int_5 = int() + 5 + assert i32_3 + int_5 == 8 + assert int_5 - i32_3 == 2 + assert i32_3 << int_5 == 96 + assert int_5 << i32_3 == 40 + assert i32_3 ^ int_5 == 6 + assert int_5 | i32_3 == 7 + + int_largest = int() + (1 << 31) - 1 + assert int_largest - i32_3 == 2147483644 + int_smallest = int() - (1 << 31) + assert int_smallest + i32_3 == -2147483645 + + int_too_big = int() + (1 << 31) + int_too_small = int() - (1 << 31) - 1 + with assertRaises(OverflowError): + assert i32_3 & int_too_big + with assertRaises(OverflowError): + assert int_too_small & i32_3 + +def test_coerce_to_and_from_int() -> None: + for shift in range(0, 32): + for sign in 1, -1: + for delta in range(-5, 5): + n = sign * (1 << shift) + delta + if -(1 << 31) <= n < (1 << 31): + x: i32 = n + m: int = x + assert m == n + +def test_explicit_conversion_to_i32() -> None: + x = i32(5) + assert x == 5 + y = int() - 113 + x = i32(y) + assert x == -113 + n64: i64 = 1733 + x = i32(n64) + assert x == 1733 + n32 = -1733 + x = i32(n32) + assert x == -1733 + z = i32(x) + assert z == -1733 + +def test_explicit_conversion_overflow() -> None: + max_i32 = int() + 2**31 - 1 + x = i32(max_i32) + assert x == 2**31 - 1 + assert int(x) == max_i32 + + min_i32 = int() - 2**31 + y = i32(min_i32) + assert y == -2**31 + assert int(y) == min_i32 + + too_big = int() + 2**31 + with assertRaises(OverflowError): + x = i32(too_big) + + too_small = int() - 2**31 - 1 + with assertRaises(OverflowError): + x = i32(too_small) + +def test_i32_from_large_small_literal() -> None: + x = i32(2**31 - 1) + assert x == 2**31 - 1 + x = i32(-2**31) + assert x == -2**31 + +def test_i32_truncate_from_i64() -> None: + large = i64(2**32 + 157 + int()) + x = i32(large) + assert x == 157 + small = i64(-2**32 - 157 + int()) + x = i32(small) + assert x == -157 + large2 = i64(2**31 + int()) + x = i32(large2) + assert x == -2**31 + small2 = i64(-2**31 - 1 - int()) + x = i32(small2) + assert x == 2**31 - 1 + +def from_float(x: float) -> i32: + return i32(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**31 - 1) == 2**31 - 1 + assert from_float(-2**31) == -2**31 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i32"): + assert from_float(float(2**31)) + with 
assertRaises(OverflowError, "int too large to convert to i32"): + # One ulp below the lowest valid i64 value + from_float(float(-2**31 - 2048)) + +def test_tuple_i32() -> None: + a: i32 = 1 + b: i32 = 2 + t = (a, b) + a, b = t + assert a == 1 + assert b == 2 + x: Any = t + tt: Tuple[i32, i32] = x + assert tt == (1, 2) diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test new file mode 100644 index 000000000000..cd4ac19532d2 --- /dev/null +++ b/mypyc/test-data/run-i64.test @@ -0,0 +1,1501 @@ +[case testI64BasicOps] +from typing import List, Any, Tuple, Union + +from mypy_extensions import i64, i32 + +from testutil import assertRaises + +def inc(n: i64) -> i64: + return n + 1 + +def test_inc() -> None: + # Use int() to avoid constant folding + n = 1 + int() + m = 2 + int() + assert inc(n) == m + +def min_ll(x: i64, y: i64) -> i64: + if x < y: + return x + else: + return y + +def test_min() -> None: + assert min_ll(1 + int(), 2) == 1 + assert min_ll(2 + int(), 1) == 1 + assert min_ll(1 + int(), 1) == 1 + assert min_ll(-2 + int(), 1) == -2 + assert min_ll(1 + int(), -2) == -2 + +def eq(x: i64, y: i64) -> bool: + return x == y + +def test_eq() -> None: + assert eq(int(), int()) + assert eq(5 + int(), 5 + int()) + assert eq(-5 + int(), -5 + int()) + assert not eq(int(), 1 + int()) + assert not eq(5 + int(), 6 + int()) + assert not eq(-5 + int(), -6 + int()) + assert not eq(-5 + int(), 5 + int()) + +def test_comparisons() -> None: + one: i64 = 1 + int() + one2: i64 = 1 + int() + two: i64 = 2 + int() + assert one < two + assert not (one < one2) + assert not (two < one) + assert two > one + assert not (one > one2) + assert not (one > two) + assert one <= two + assert one <= one2 + assert not (two <= one) + assert two >= one + assert one >= one2 + assert not (one >= two) + assert one == one2 + assert not (one == two) + assert one != two + assert not (one != one2) + +def is_true(x: i64) -> bool: + if x: + return True + else: + return False + +def is_true2(x: i64) -> bool: + return bool(x) + +def is_false(x: i64) -> bool: + if not x: + return True + else: + return False + +def test_i64_as_bool() -> None: + assert not is_true(0) + assert not is_true2(0) + assert is_false(0) + for x in 1, 55, -1, -7, 1 << 40, -(1 << 50): + assert is_true(x) + assert is_true2(x) + assert not is_false(x) + +def bool_as_i64(b: bool) -> i64: + return b + +def test_bool_as_i64() -> None: + assert bool_as_i64(False) == 0 + assert bool_as_i64(True) == 1 + +def div_by_3(x: i64) -> i64: + return x // 3 + +def div_by_neg_3(x: i64) -> i64: + return x // -3 + +def div(x: i64, y: i64) -> i64: + return x // y + +def test_divide_by_constant() -> None: + for i in range(-1000, 1000): + assert div_by_3(i) == i // 3 + for i in range(-2**63, -2**63 + 1000): + assert div_by_3(i) == i // 3 + for i in range(2**63 - 1000, 2**63): + assert div_by_3(i) == i // 3 + +def test_divide_by_negative_constant() -> None: + for i in range(-1000, 1000): + assert div_by_neg_3(i) == i // -3 + for i in range(-2**63, -2**63 + 1000): + assert div_by_neg_3(i) == i // -3 + for i in range(2**63 - 1000, 2**63): + assert div_by_neg_3(i) == i // -3 + +def test_divide_by_variable() -> None: + values = (list(range(-50, 50)) + + list(range(-2**63, -2**63 + 10)) + + list(range(2**63 - 10, 2**63))) + for x in values: + for y in values: + if y != 0: + if x // y == 2**63: + with assertRaises(OverflowError, "integer division overflow"): + div(x, y) + else: + assert div(x, y) == x // y + else: + with assertRaises(ZeroDivisionError, "integer division or 
modulo by zero"): + div(x, y) + +def mod_by_7(x: i64) -> i64: + return x % 7 + +def mod_by_neg_7(x: i64) -> i64: + return x // -7 + +def mod(x: i64, y: i64) -> i64: + return x % y + +def test_mod_by_constant() -> None: + for i in range(-1000, 1000): + assert mod_by_7(i) == i % 7 + for i in range(-2**63, -2**63 + 1000): + assert mod_by_7(i) == i % 7 + for i in range(2**63 - 1000, 2**63): + assert mod_by_7(i) == i % 7 + +def test_mod_by_negative_constant() -> None: + for i in range(-1000, 1000): + assert mod_by_neg_7(i) == i // -7 + for i in range(-2**63, -2**63 + 1000): + assert mod_by_neg_7(i) == i // -7 + for i in range(2**63 - 1000, 2**63): + assert mod_by_neg_7(i) == i // -7 + +def test_mod_by_variable() -> None: + values = (list(range(-50, 50)) + + list(range(-2**63, -2**63 + 10)) + + list(range(2**63 - 10, 2**63))) + for x in values: + for y in values: + if y != 0: + assert mod(x, y) == x % y + else: + with assertRaises(ZeroDivisionError, "integer division or modulo by zero"): + mod(x, y) + +def get_item(a: List[i64], n: i64) -> i64: + return a[n] + +def test_get_list_item() -> None: + a = [1, 6, -2] + assert get_item(a, 0) == 1 + assert get_item(a, 1) == 6 + assert get_item(a, 2) == -2 + assert get_item(a, -1) == -2 + assert get_item(a, -2) == 6 + assert get_item(a, -3) == 1 + with assertRaises(IndexError, "list index out of range"): + get_item(a, 3) + with assertRaises(IndexError, "list index out of range"): + get_item(a, -4) + # TODO: Very large/small values and indexes + +def test_simple_arithmetic_ops() -> None: + zero: i64 = int() + one: i64 = zero + 1 + two: i64 = one + 1 + neg_one: i64 = -one + assert one + one == 2 + assert one + two == 3 + assert one + neg_one == 0 + assert one - one == 0 + assert one - two == -1 + assert one * one == 1 + assert one * two == 2 + assert two * two == 4 + assert two * neg_one == -2 + assert neg_one * one == -1 + assert neg_one * neg_one == 1 + assert two * 0 == 0 + assert 0 * two == 0 + assert -one == -1 + assert -two == -2 + assert -neg_one == 1 + assert -zero == 0 + +def test_bitwise_ops() -> None: + x: i64 = 7997307308812232241 + int() + y: i64 = 4333433528471475340 + int() + z: i64 = -2462230749488444526 + int() + zero: i64 = int() + one: i64 = zero + 1 + two: i64 = zero + 2 + neg_one: i64 = -one + + assert x & y == 3179577071592752128 + assert x & z == 5536089561888850448 + assert z & z == z + assert x & zero == 0 + + assert x | y == 9151163765690955453 + assert x | z == -1013002565062733 + assert z | z == z + assert x | 0 == x + + assert x ^ y == 5971586694098203325 + assert x ^ z == -5537102564453913181 + assert z ^ z == 0 + assert z ^ 0 == z + + assert x << one == -2452129456085087134 + assert x << two == -4904258912170174268 + assert z << two == 8597821075755773512 + assert z << 0 == z + + assert x >> one == 3998653654406116120 + assert x >> two == 1999326827203058060 + assert z >> two == -615557687372111132 + assert z >> 0 == z + + assert ~x == -7997307308812232242 + assert ~z == 2462230749488444525 + assert ~zero == -1 + assert ~neg_one == 0 + +def test_coerce_to_and_from_int() -> None: + for shift in range(0, 64): + for sign in 1, -1: + for delta in range(-5, 5): + n = sign * (1 << shift) + delta + if -(1 << 63) <= n < (1 << 63): + x: i64 = n + m: int = x + assert m == n + +def test_coerce_to_and_from_int2() -> None: + for shift in range(0, 64): + for sign in 1, -1: + for delta in range(-5, 5): + n = sign * (1 << shift) + delta + if -(1 << 63) <= n < (1 << 63): + x: i64 = i64(n) + m: int = int(x) + assert m == n + +def 
test_explicit_conversion_to_i64() -> None: + x = i64(5) + assert x == 5 + y = int() - 113 + x = i64(y) + assert x == -113 + n32: i32 = 1733 + x = i64(n32) + assert x == 1733 + n32 = -1733 + x = i64(n32) + assert x == -1733 + z = i64(x) + assert z == -1733 + +def test_explicit_conversion_overflow() -> None: + max_i64 = int() + 2**63 - 1 + x = i64(max_i64) + assert x == 2**63 - 1 + assert int(x) == max_i64 + + min_i64 = int() - 2**63 + y = i64(min_i64) + assert y == -2**63 + assert int(y) == min_i64 + + too_big = int() + 2**63 + with assertRaises(OverflowError): + x = i64(too_big) + + too_small = int() - 2**63 - 1 + with assertRaises(OverflowError): + x = i64(too_small) + +def test_i64_from_large_small_literal() -> None: + x = i64(2**63 - 1) + assert x == 2**63 - 1 + x = i64(-2**63) + assert x == -2**63 + +def from_float(x: float) -> i64: + return i64(x) + +def test_explicit_conversion_from_float() -> None: + assert from_float(0.0) == 0 + assert from_float(1.456) == 1 + assert from_float(-1234.567) == -1234 + assert from_float(2**63 - 1) == 2**63 - 1 + assert from_float(-2**63) == -2**63 + # The error message could be better, but this is acceptable + with assertRaises(OverflowError, "int too large to convert to i64"): + assert from_float(float(2**63)) + with assertRaises(OverflowError, "int too large to convert to i64"): + # One ulp below the lowest valid i64 value + from_float(float(-2**63 - 2048)) + +def from_str(s: str) -> i64: + return i64(s) + +def test_explicit_conversion_from_str() -> None: + assert from_str("0") == 0 + assert from_str("1") == 1 + assert from_str("-1234") == -1234 + with assertRaises(ValueError): + from_str("1.2") + +def from_str_with_base(s: str, base: int) -> i64: + return i64(s, base) + +def test_explicit_conversion_from_str_with_base() -> None: + assert from_str_with_base("101", 2) == 5 + assert from_str_with_base("109", 10) == 109 + assert from_str_with_base("-f0A", 16) == -3850 + assert from_str_with_base("0x1a", 16) == 26 + assert from_str_with_base("0X1A", 16) == 26 + with assertRaises(ValueError): + from_str_with_base("1.2", 16) + +def from_bool(b: bool) -> i64: + return i64(b) + +def test_explicit_conversion_from_bool() -> None: + assert from_bool(True) == 1 + assert from_bool(False) == 0 + +class IntConv: + def __init__(self, x: i64) -> None: + self.x = x + + def __int__(self) -> i64: + return self.x + 1 + +def test_explicit_conversion_from_instance() -> None: + assert i64(IntConv(0)) == 1 + assert i64(IntConv(12345)) == 12346 + assert i64(IntConv(-23)) == -22 + +def test_explicit_conversion_from_any() -> None: + # This can't be specialized + a: Any = "101" + assert i64(a, base=2) == 5 + +def test_tuple_i64() -> None: + a: i64 = 1 + b: i64 = 2 + t = (a, b) + a, b = t + assert a == 1 + assert b == 2 + x: Any = t + tt: Tuple[i64, i64] = x + assert tt == (1, 2) + +def test_list_set_item() -> None: + a: List[i64] = [0, 2, 6] + z: i64 = int() + a[z] = 1 + assert a == [1, 2, 6] + a[z + 2] = 9 + assert a == [1, 2, 9] + a[-(z + 1)] = 10 + assert a == [1, 2, 10] + a[-(z + 3)] = 3 + assert a == [3, 2, 10] + with assertRaises(IndexError): + a[z + 3] = 0 + with assertRaises(IndexError): + a[-(z + 4)] = 0 + assert a == [3, 2, 10] + +class C: + def __init__(self, x: i64) -> None: + self.x = x + +def test_attributes() -> None: + i: i64 + for i in range(-1000, 1000): + c = C(i) + assert c.x == i + c.x = i + 1 + assert c.x == i + 1 + +def test_mixed_comparisons() -> None: + i64_3: i64 = int() + 3 + int_5 = int() + 5 + assert i64_3 < int_5 + assert int_5 > i64_3 + b = i64_3 
> int_5 + assert not b + + int_largest = int() + (1 << 63) - 1 + assert int_largest > i64_3 + int_smallest = int() - (1 << 63) + assert i64_3 > int_smallest + + int_too_big = int() + (1 << 63) + int_too_small = int() - (1 << 63) - 1 + with assertRaises(OverflowError): + assert i64_3 < int_too_big + with assertRaises(OverflowError): + assert int_too_big < i64_3 + with assertRaises(OverflowError): + assert i64_3 > int_too_small + with assertRaises(OverflowError): + assert int_too_small < i64_3 + +def test_mixed_comparisons_32bit() -> None: + # Test edge cases on 32-bit platforms + i64_3: i64 = int() + 3 + int_5 = int() + 5 + + int_largest_short = int() + (1 << 30) - 1 + int_largest_short_i64: i64 = int_largest_short + assert int_largest_short > i64_3 + int_smallest_short = int() - (1 << 30) + int_smallest_short_i64: i64 = int_smallest_short + assert i64_3 > int_smallest_short + + int_big = int() + (1 << 30) + assert int_big > i64_3 + int_small = int() - (1 << 30) - 1 + assert i64_3 > int_small + + assert int_smallest_short_i64 > int_small + assert int_largest_short_i64 < int_big + +def test_mixed_arithmetic_and_bitwise_ops() -> None: + i64_3: i64 = int() + 3 + int_5 = int() + 5 + assert i64_3 + int_5 == 8 + assert int_5 - i64_3 == 2 + assert i64_3 << int_5 == 96 + assert int_5 << i64_3 == 40 + assert i64_3 ^ int_5 == 6 + assert int_5 | i64_3 == 7 + + int_largest = int() + (1 << 63) - 1 + assert int_largest - i64_3 == 9223372036854775804 + int_smallest = int() - (1 << 63) + assert int_smallest + i64_3 == -9223372036854775805 + + int_too_big = int() + (1 << 63) + int_too_small = int() - (1 << 63) - 1 + with assertRaises(OverflowError): + assert i64_3 & int_too_big + with assertRaises(OverflowError): + assert int_too_small & i64_3 + +def test_for_loop() -> None: + n: i64 = 0 + for i in range(i64(5 + int())): + n += i + assert n == 10 + n = 0 + for i in range(i64(5)): + n += i + assert n == 10 + n = 0 + for i in range(i64(2 + int()), 5 + int()): + n += i + assert n == 9 + n = 0 + for i in range(2, i64(5 + int())): + n += i + assert n == 9 + assert sum([x * x for x in range(i64(4 + int()))]) == 1 + 4 + 9 + +def narrow1(x: Union[str, i64]) -> i64: + if isinstance(x, i64): + return x + return len(x) + +def narrow2(x: Union[str, i64]) -> i64: + if isinstance(x, int): + return x + return len(x) + +def test_isinstance() -> None: + assert narrow1(123) == 123 + assert narrow1("foobar") == 6 + assert narrow2(123) == 123 + assert narrow2("foobar") == 6 + +[case testI64ErrorValuesAndUndefined] +from typing import Any, Tuple +import sys + +from mypy_extensions import mypyc_attr, i64 +from typing_extensions import Final + +from testutil import assertRaises + +def maybe_raise(n: i64, error: bool) -> i64: + if error: + raise ValueError() + return n + +def test_error_value() -> None: + for i in range(-1000, 1000): + assert maybe_raise(i, False) == i + with assertRaises(ValueError): + maybe_raise(0, True) + +class C: + def maybe_raise(self, n: i64, error: bool) -> i64: + if error: + raise ValueError() + return n + +def test_method_error_value() -> None: + for i in range(-1000, 1000): + assert C().maybe_raise(i, False) == i + with assertRaises(ValueError): + C().maybe_raise(0, True) + +def maybe_raise_tuple(n: i64, error: bool) -> Tuple[i64, i64]: + if error: + raise ValueError() + return n, n+ 1 + +def test_tuple_error_value() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + f: Any = maybe_raise_tuple + for 
i in range(-1000, 1000): + assert f(i, False) == (i, i + 1) + with assertRaises(ValueError): + f(0, True) + +def maybe_raise_tuple2(n: i64, error: bool) -> Tuple[i64, int]: + if error: + raise ValueError() + return n, n+ 1 + +def test_tuple_error_value_2() -> None: + for i in range(-1000, 1000): + assert maybe_raise_tuple2(i, False) == (i, i + 1) + with assertRaises(ValueError): + maybe_raise_tuple(0, True) + +def test_unbox_int() -> None: + for i in list(range(-1000, 1000)) + [-(1 << 63), (1 << 63) - 1]: + o: Any = i + x: i64 = i + assert x == i + y: i64 = o + assert y == i + +def test_unbox_int_fails() -> None: + o: Any = 'x' + if sys.version_info[0] == 3 and sys.version_info[1] < 10: + msg = "an integer is required (got type str)" + else: + msg = "'str' object cannot be interpreted as an integer" + with assertRaises(TypeError, msg): + x: i64 = o + o2: Any = 1 << 63 + with assertRaises(OverflowError, "int too large to convert to i64"): + y: i64 = o2 + o3: Any = -(1 << 63 + 1) + with assertRaises(OverflowError, "int too large to convert to i64"): + z: i64 = o3 + +class Uninit: + x: i64 + y: i64 = 0 + z: i64 + +class Derived(Uninit): + a: i64 = 1 + b: i64 + c: i64 = 2 + +class Derived2(Derived): + h: i64 + +def test_uninitialized_attr() -> None: + o = Uninit() + assert o.y == 0 + with assertRaises(AttributeError): + o.x + with assertRaises(AttributeError): + o.z + o.x = 1 + assert o.x == 1 + with assertRaises(AttributeError): + o.z + o.z = 2 + assert o.z == 2 + +# This is the error value, but it's also a valid normal value +MAGIC: Final = -113 + +def test_magic_value() -> None: + o = Uninit() + o.x = MAGIC + assert o.x == MAGIC + with assertRaises(AttributeError): + o.z + o.z = MAGIC + assert o.x == MAGIC + assert o.z == MAGIC + +def test_magic_value_via_any() -> None: + o: Any = Uninit() + with assertRaises(AttributeError): + o.x + with assertRaises(AttributeError): + o.z + o.x = MAGIC + assert o.x == MAGIC + with assertRaises(AttributeError): + o.z + o.z = MAGIC + assert o.z == MAGIC + +def test_magic_value_and_inheritance() -> None: + o = Derived2() + o.x = MAGIC + assert o.x == MAGIC + with assertRaises(AttributeError): + o.z + with assertRaises(AttributeError): + o.b + with assertRaises(AttributeError): + o.h + o.z = MAGIC + assert o.z == MAGIC + with assertRaises(AttributeError): + o.b + with assertRaises(AttributeError): + o.h + o.h = MAGIC + assert o.h == MAGIC + with assertRaises(AttributeError): + o.b + o.b = MAGIC + assert o.b == MAGIC + +@mypyc_attr(allow_interpreted_subclasses=True) +class MagicInit: + x: i64 = MAGIC + +def test_magic_value_as_initializer() -> None: + o = MagicInit() + assert o.x == MAGIC + +class ManyUninit: + a1: i64 + a2: i64 + a3: i64 + a4: i64 + a5: i64 + a6: i64 + a7: i64 + a8: i64 + a9: i64 + a10: i64 + a11: i64 + a12: i64 + a13: i64 + a14: i64 + a15: i64 + a16: i64 + a17: i64 + a18: i64 + a19: i64 + a20: i64 + a21: i64 + a22: i64 + a23: i64 + a24: i64 + a25: i64 + a26: i64 + a27: i64 + a28: i64 + a29: i64 + a30: i64 + a31: i64 + a32: i64 + a33: i64 + a34: i64 + a35: i64 + a36: i64 + a37: i64 + a38: i64 + a39: i64 + a40: i64 + a41: i64 + a42: i64 + a43: i64 + a44: i64 + a45: i64 + a46: i64 + a47: i64 + a48: i64 + a49: i64 + a50: i64 + a51: i64 + a52: i64 + a53: i64 + a54: i64 + a55: i64 + a56: i64 + a57: i64 + a58: i64 + a59: i64 + a60: i64 + a61: i64 + a62: i64 + a63: i64 + a64: i64 + a65: i64 + a66: i64 + a67: i64 + a68: i64 + a69: i64 + a70: i64 + a71: i64 + a72: i64 + a73: i64 + a74: i64 + a75: i64 + a76: i64 + a77: i64 + a78: i64 + a79: i64 + a80: 
i64 + a81: i64 + a82: i64 + a83: i64 + a84: i64 + a85: i64 + a86: i64 + a87: i64 + a88: i64 + a89: i64 + a90: i64 + a91: i64 + a92: i64 + a93: i64 + a94: i64 + a95: i64 + a96: i64 + a97: i64 + a98: i64 + a99: i64 + a100: i64 + +def test_many_uninitialized_attributes() -> None: + o = ManyUninit() + with assertRaises(AttributeError): + o.a1 + with assertRaises(AttributeError): + o.a10 + with assertRaises(AttributeError): + o.a20 + with assertRaises(AttributeError): + o.a30 + with assertRaises(AttributeError): + o.a31 + with assertRaises(AttributeError): + o.a32 + with assertRaises(AttributeError): + o.a33 + with assertRaises(AttributeError): + o.a40 + with assertRaises(AttributeError): + o.a50 + with assertRaises(AttributeError): + o.a60 + with assertRaises(AttributeError): + o.a62 + with assertRaises(AttributeError): + o.a63 + with assertRaises(AttributeError): + o.a64 + with assertRaises(AttributeError): + o.a65 + with assertRaises(AttributeError): + o.a80 + with assertRaises(AttributeError): + o.a100 + o.a30 = MAGIC + assert o.a30 == MAGIC + o.a31 = MAGIC + assert o.a31 == MAGIC + o.a32 = MAGIC + assert o.a32 == MAGIC + o.a33 = MAGIC + assert o.a33 == MAGIC + with assertRaises(AttributeError): + o.a34 + o.a62 = MAGIC + assert o.a62 == MAGIC + o.a63 = MAGIC + assert o.a63 == MAGIC + o.a64 = MAGIC + assert o.a64 == MAGIC + o.a65 = MAGIC + assert o.a65 == MAGIC + with assertRaises(AttributeError): + o.a66 + +class BaseNoBitmap: + x: int = 5 + +class DerivedBitmap(BaseNoBitmap): + # Subclass needs a bitmap, but base class doesn't have it. + y: i64 + +def test_derived_adds_bitmap() -> None: + d = DerivedBitmap() + d.x = 643 + b: BaseNoBitmap = d + assert b.x == 643 + +class Delete: + __deletable__ = ['x', 'y'] + x: i64 + y: i64 + +def test_del() -> None: + o = Delete() + o.x = MAGIC + o.y = -1 + assert o.x == MAGIC + assert o.y == -1 + del o.x + with assertRaises(AttributeError): + o.x + assert o.y == -1 + del o.y + with assertRaises(AttributeError): + o.y + o.x = 5 + assert o.x == 5 + with assertRaises(AttributeError): + o.y + del o.x + with assertRaises(AttributeError): + o.x + +class UndefinedTuple: + def __init__(self, x: i64, y: i64) -> None: + if x != 0: + self.t = (x, y) + +def test_undefined_native_int_tuple() -> None: + o = UndefinedTuple(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = UndefinedTuple(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + +def test_undefined_native_int_tuple_via_any() -> None: + cls: Any = UndefinedTuple + o: Any = cls(MAGIC, MAGIC) + assert o.t[0] == MAGIC + assert o.t[1] == MAGIC + o = cls(0, 0) + with assertRaises(AttributeError): + o.t + o = UndefinedTuple(-13, 45) + assert o.t == (-13, 45) + +[case testI64DefaultArgValues] +from typing import Any, Iterator, Tuple +from typing_extensions import Final + +MAGIC: Final = -113 + +from mypy_extensions import i64 + +def f(x: i64, y: i64 = 5) -> i64: + return x + y + +def test_simple_default_arg() -> None: + assert f(3) == 8 + assert f(4, 9) == 13 + assert f(5, MAGIC) == -108 + for i in range(-1000, 1000): + assert f(1, i) == 1 + i + f2: Any = f + assert f2(3) == 8 + assert f2(4, 9) == 13 + assert f2(5, MAGIC) == -108 + +def g(a: i64, b: i64 = 1, c: int = 2, d: i64 = 3) -> i64: + return a + b + c + d + +def test_two_default_args() -> None: + assert g(10) == 16 + assert g(10, 2) == 17 + assert g(10, 2, 3) == 18 + assert g(10, 2, 3, 4) == 19 + g2: Any = g + assert g2(10) == 16 + assert g2(10, 2) == 17 + assert g2(10, 2, 3) == 18 + 
assert g2(10, 2, 3, 4) == 19 + +class C: + def __init__(self) -> None: + self.i: i64 = 1 + + def m(self, a: i64, b: i64 = 1, c: int = 2, d: i64 = 3) -> i64: + return self.i + a + b + c + d + +class D(C): + def m(self, a: i64, b: i64 = 2, c: int = 3, d: i64 = 4) -> i64: + return self.i + a + b + c + d + + def mm(self, a: i64 = 2, b: i64 = 1) -> i64: + return self.i + a + b + + @staticmethod + def s(a: i64 = 2, b: i64 = 1) -> i64: + return a + b + + @classmethod + def c(cls, a: i64 = 2, b: i64 = 3) -> i64: + assert cls is D + return a + b + +def test_method_default_args() -> None: + a = [C(), D()] + assert a[0].m(4) == 11 + d = D() + assert d.mm() == 4 + assert d.mm(5) == 7 + assert d.mm(MAGIC) == MAGIC + 2 + assert d.mm(b=5) == 8 + assert D.mm(d) == 4 + assert D.mm(d, 6) == 8 + assert D.mm(d, MAGIC) == MAGIC + 2 + assert D.mm(d, b=6) == 9 + dd: Any = d + assert dd.mm() == 4 + assert dd.mm(5) == 7 + assert dd.mm(MAGIC) == MAGIC + 2 + assert dd.mm(b=5) == 8 + +def test_static_method_default_args() -> None: + d = D() + assert d.s() == 3 + assert d.s(5) == 6 + assert d.s(MAGIC) == MAGIC + 1 + assert d.s(5, 6) == 11 + assert D.s() == 3 + assert D.s(5) == 6 + assert D.s(MAGIC) == MAGIC + 1 + assert D.s(5, 6) == 11 + dd: Any = d + assert dd.s() == 3 + assert dd.s(5) == 6 + assert dd.s(MAGIC) == MAGIC + 1 + assert dd.s(5, 6) == 11 + +def test_class_method_default_args() -> None: + d = D() + assert d.c() == 5 + assert d.c(5) == 8 + assert d.c(MAGIC) == MAGIC + 3 + assert d.c(b=5) == 7 + assert D.c() == 5 + assert D.c(5) == 8 + assert D.c(MAGIC) == MAGIC + 3 + assert D.c(b=5) == 7 + dd: Any = d + assert dd.c() == 5 + assert dd.c(5) == 8 + assert dd.c(MAGIC) == MAGIC + 3 + assert dd.c(b=5) == 7 + +class Init: + def __init__(self, x: i64 = 2, y: i64 = 5) -> None: + self.x = x + self.y = y + +def test_init_default_args() -> None: + o = Init() + assert o.x == 2 + assert o.y == 5 + o = Init(7, 8) + assert o.x == 7 + assert o.y == 8 + o = Init(4) + assert o.x == 4 + assert o.y == 5 + o = Init(MAGIC, MAGIC) + assert o.x == MAGIC + assert o.y == MAGIC + o = Init(3, MAGIC) + assert o.x == 3 + assert o.y == MAGIC + o = Init(MAGIC, 11) + assert o.x == MAGIC + assert o.y == 11 + o = Init(MAGIC) + assert o.x == MAGIC + assert o.y == 5 + o = Init(y=MAGIC) + assert o.x == 2 + assert o.y == MAGIC + +def kw_only(*, a: i64 = 1, b: int = 2, c: i64 = 3) -> i64: + return a + b + c * 2 + +def test_kw_only_default_args() -> None: + assert kw_only() == 9 + assert kw_only(a=2) == 10 + assert kw_only(b=4) == 11 + assert kw_only(c=11) == 25 + assert kw_only(a=2, c=4) == 12 + assert kw_only(c=4, a=2) == 12 + kw_only2: Any = kw_only + assert kw_only2() == 9 + assert kw_only2(a=2) == 10 + assert kw_only2(b=4) == 11 + assert kw_only2(c=11) == 25 + assert kw_only2(a=2, c=4) == 12 + assert kw_only2(c=4, a=2) == 12 + +def tuples(t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> i64: + return t[0] + t[1] + +def test_tuple_arg_defaults() -> None: + assert tuples() == 2 * MAGIC + assert tuples((1, 2)) == 3 + assert tuples((MAGIC, MAGIC)) == 2 * MAGIC + tuples2: Any = tuples + assert tuples2() == 2 * MAGIC + assert tuples2((1, 2)) == 3 + assert tuples2((MAGIC, MAGIC)) == 2 * MAGIC + +class TupleInit: + def __init__(self, t: Tuple[i64, i64] = (MAGIC, MAGIC)) -> None: + self.t = t[0] + t[1] + +def test_tuple_init_arg_defaults() -> None: + assert TupleInit().t == 2 * MAGIC + assert TupleInit((1, 2)).t == 3 + assert TupleInit((MAGIC, MAGIC)).t == 2 * MAGIC + o: Any = TupleInit + assert o().t == 2 * MAGIC + assert o((1, 2)).t == 3 + assert o((MAGIC, 
MAGIC)).t == 2 * MAGIC + +def many_args( + a1: i64 = 0, + a2: i64 = 1, + a3: i64 = 2, + a4: i64 = 3, + a5: i64 = 4, + a6: i64 = 5, + a7: i64 = 6, + a8: i64 = 7, + a9: i64 = 8, + a10: i64 = 9, + a11: i64 = 10, + a12: i64 = 11, + a13: i64 = 12, + a14: i64 = 13, + a15: i64 = 14, + a16: i64 = 15, + a17: i64 = 16, + a18: i64 = 17, + a19: i64 = 18, + a20: i64 = 19, + a21: i64 = 20, + a22: i64 = 21, + a23: i64 = 22, + a24: i64 = 23, + a25: i64 = 24, + a26: i64 = 25, + a27: i64 = 26, + a28: i64 = 27, + a29: i64 = 28, + a30: i64 = 29, + a31: i64 = 30, + a32: i64 = 31, + a33: i64 = 32, + a34: i64 = 33, +) -> i64: + return a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10 + a11 + a12 + a13 + a14 + a15 + a16 + a17 + a18 + a19 + a20 + a21 + a22 + a23 + a24 + a25 + a26 + a27 + a28 + a29 + a30 + a31 + a32 + a33 + a34 + +def test_many_args() -> None: + assert many_args() == 561 + assert many_args(a1=100) == 661 + assert many_args(a2=101) == 661 + assert many_args(a15=114) == 661 + assert many_args(a31=130) == 661 + assert many_args(a32=131) == 661 + assert many_args(a33=232) == 761 + assert many_args(a34=333) == 861 + assert many_args(a1=100, a33=232) == 861 + f: Any = many_args + assert f() == 561 + assert f(a1=100) == 661 + assert f(a2=101) == 661 + assert f(a15=114) == 661 + assert f(a31=130) == 661 + assert f(a32=131) == 661 + assert f(a33=232) == 761 + assert f(a34=333) == 861 + assert f(a1=100, a33=232) == 861 + +def test_nested_function_defaults() -> None: + a: i64 = 1 + + def nested(x: i64 = 2, y: i64 = 3) -> i64: + return a + x + y + + assert nested() == 6 + assert nested(3) == 7 + assert nested(y=5) == 8 + assert nested(MAGIC) == MAGIC + 4 + a = 11 + assert nested() == 16 + + +def test_nested_function_defaults_via_any() -> None: + a: i64 = 1 + + def nested_native(x: i64 = 2, y: i64 = 3) -> i64: + return a + x + y + + nested: Any = nested_native + + assert nested() == 6 + assert nested(3) == 7 + assert nested(y=5) == 8 + assert nested(MAGIC) == MAGIC + 4 + a = 11 + assert nested() == 16 + +def gen(x: i64 = 1, y: i64 = 2) -> Iterator[i64]: + yield x + y + +def test_generator() -> None: + g = gen() + assert next(g) == 3 + g = gen(2) + assert next(g) == 4 + g = gen(2, 3) + assert next(g) == 5 + a: Any = gen + g = a() + assert next(g) == 3 + g = a(2) + assert next(g) == 4 + g = a(2, 3) + assert next(g) == 5 + +def magic_default(x: i64 = MAGIC) -> i64: + return x + +def test_magic_default() -> None: + assert magic_default() == MAGIC + assert magic_default(1) == 1 + assert magic_default(MAGIC) == MAGIC + a: Any = magic_default + assert a() == MAGIC + assert a(1) == 1 + assert a(MAGIC) == MAGIC + +[case testI64UndefinedLocal] +from typing_extensions import Final + +from mypy_extensions import i64, i32 + +from testutil import assertRaises + +MAGIC: Final = -113 + + +def test_conditionally_defined_local() -> None: + x = not int() + if x: + y: i64 = 5 + z: i32 = 6 + assert y == 5 + assert z == 6 + +def test_conditionally_undefined_local() -> None: + x = int() + if x: + y: i64 = 5 + z: i32 = 6 + else: + ok: i64 = 7 + assert ok == 7 + try: + print(y) + except NameError as e: + assert str(e) == 'local variable "y" referenced before assignment' + else: + assert False + try: + print(z) + except NameError as e: + assert str(e) == 'local variable "z" referenced before assignment' + else: + assert False + +def test_assign_error_value_conditionally() -> None: + x = int() + if not x: + y: i64 = MAGIC + z: i32 = MAGIC + assert y == MAGIC + assert z == MAGIC + +def tuple_case(x: i64, y: i64) -> None: + if not int(): + 
t = (x, y) + assert t == (x, y) + if int(): + t2 = (x, y) + try: + print(t2) + except NameError as e: + assert str(e) == 'local variable "t2" referenced before assignment' + else: + assert False + +def test_conditionally_undefined_tuple() -> None: + tuple_case(2, 3) + tuple_case(-2, -3) + tuple_case(MAGIC, MAGIC) + +def test_many_locals() -> None: + x = int() + if x: + a0: i64 = 0 + a1: i64 = 1 + a2: i64 = 2 + a3: i64 = 3 + a4: i64 = 4 + a5: i64 = 5 + a6: i64 = 6 + a7: i64 = 7 + a8: i64 = 8 + a9: i64 = 9 + a10: i64 = 10 + a11: i64 = 11 + a12: i64 = 12 + a13: i64 = 13 + a14: i64 = 14 + a15: i64 = 15 + a16: i64 = 16 + a17: i64 = 17 + a18: i64 = 18 + a19: i64 = 19 + a20: i64 = 20 + a21: i64 = 21 + a22: i64 = 22 + a23: i64 = 23 + a24: i64 = 24 + a25: i64 = 25 + a26: i64 = 26 + a27: i64 = 27 + a28: i64 = 28 + a29: i64 = 29 + a30: i64 = 30 + a31: i64 = 31 + a32: i64 = 32 + a33: i64 = 33 + with assertRaises(NameError): + print(a0) + with assertRaises(NameError): + print(a31) + with assertRaises(NameError): + print(a32) + with assertRaises(NameError): + print(a33) + a0 = 5 + assert a0 == 5 + with assertRaises(NameError): + print(a31) + with assertRaises(NameError): + print(a32) + with assertRaises(NameError): + print(a33) + a32 = 55 + assert a0 == 5 + assert a32 == 55 + with assertRaises(NameError): + print(a31) + with assertRaises(NameError): + print(a33) + a31 = 10 + a33 = 20 + assert a0 == 5 + assert a31 == 10 + assert a32 == 55 + assert a33 == 20 + +[case testI64GlueMethodsAndInheritance] +from typing import Any +from typing_extensions import Final + +from mypy_extensions import i64, trait + +from testutil import assertRaises + +MAGIC: Final = -113 + +class Base: + def foo(self) -> i64: + return 5 + + def bar(self, x: i64 = 2) -> i64: + return x + 1 + + def hoho(self, x: i64) -> i64: + return x - 1 + +class Derived(Base): + def foo(self, x: i64 = 5) -> i64: + return x + 10 + + def bar(self, x: i64 = 3, y: i64 = 20) -> i64: + return x + y + 2 + + def hoho(self, x: i64 = 7) -> i64: + return x - 2 + +def test_derived_adds_bitmap() -> None: + b: Base = Derived() + assert b.foo() == 15 + +def test_derived_adds_another_default_arg() -> None: + b: Base = Derived() + assert b.bar() == 25 + assert b.bar(1) == 23 + assert b.bar(MAGIC) == MAGIC + 22 + +def test_derived_switches_arg_to_have_default() -> None: + b: Base = Derived() + assert b.hoho(5) == 3 + assert b.hoho(MAGIC) == MAGIC - 2 + +@trait +class T: + @property + def x(self) -> i64: ... + @property + def y(self) -> i64: ... 
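+
+# The classes below implement the trait's read-only i64 properties either as
+# plain attributes (C) or as real properties (D); the tests check that values,
+# including the -113 error sentinel (MAGIC), survive access through the trait.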
+ +class C(T): + x: i64 = 1 + y: i64 = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + assert c.y == 4 + c.y = 6 + assert c.y == 6 + t: T = C() + assert t.y == 4 + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D(T): + xx: i64 + + @property + def x(self) -> i64: + return self.xx + + @property + def y(self) -> i64: + raise TypeError + +def test_read_only_property_in_trait_implemented_as_property() -> None: + d = D() + d.xx = 5 + assert d.x == 5 + d.xx = MAGIC + assert d.x == MAGIC + with assertRaises(TypeError): + d.y + t: T = d + assert t.x == MAGIC + d.xx = 6 + assert t.x == 6 + with assertRaises(TypeError): + t.y + +@trait +class T2: + x: i64 + y: i64 + +class C2(T2): + pass + +def test_inherit_trait_attribute() -> None: + c = C2() + c.x = 5 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + with assertRaises(AttributeError): + c.y + c.y = 6 + assert c.y == 6 + t: T2 = C2() + with assertRaises(AttributeError): + t.y + t = c + assert t.x == MAGIC + c.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 + +class D2(T2): + x: i64 + y: i64 = 4 + +def test_implement_trait_attribute() -> None: + d = D2() + d.x = 5 + assert d.x == 5 + d.x = MAGIC + assert d.x == MAGIC + assert d.y == 4 + d.y = 6 + assert d.y == 6 + t: T2 = D2() + assert t.y == 4 + t = d + assert t.x == MAGIC + d.x = 55 + assert t.x == 55 + assert t.y == 6 + a: Any = d + assert a.x == 55 + assert a.y == 6 + a.x = 7 + a.y = 8 + assert a.x == 7 + assert a.y == 8 diff --git a/mypyc/test-data/run-integers.test b/mypyc/test-data/run-integers.test index 74e7cd6b8fb7..c65f36110b46 100644 --- a/mypyc/test-data/run-integers.test +++ b/mypyc/test-data/run-integers.test @@ -353,6 +353,9 @@ def is_true(x: int) -> bool: else: return False +def is_true2(x: int) -> bool: + return bool(x) + def is_false(x: int) -> bool: if not x: return True @@ -361,11 +364,32 @@ def is_false(x: int) -> bool: def test_int_as_bool() -> None: assert not is_true(0) + assert not is_true2(0) assert is_false(0) for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): assert is_true(x) + assert is_true2(x) assert not is_false(x) +def bool_as_int(b: bool) -> int: + return b + +def bool_as_int2(b: bool) -> int: + return int(b) + +def test_bool_as_int() -> None: + assert bool_as_int(False) == 0 + assert bool_as_int(True) == 1 + assert bool_as_int2(False) == 0 + assert bool_as_int2(True) == 1 + +def no_op_conversion(n: int) -> int: + return int(n) + +def test_no_op_conversion() -> None: + for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): + assert no_op_conversion(x) == x + def test_divide() -> None: for x in range(-100, 100): for y in range(-100, 100): diff --git a/mypyc/test-data/run-match.test b/mypyc/test-data/run-match.test new file mode 100644 index 000000000000..7b7ad9a4342c --- /dev/null +++ b/mypyc/test-data/run-match.test @@ -0,0 +1,283 @@ +[case testTheBigMatch_python3_10] +class Person: + __match_args__ = ("name", "age") + + name: str + age: int + + def __init__(self, name: str, age: int) -> None: + self.name = name + self.age = age + + def __str__(self) -> str: + return f"Person(name={self.name!r}, age={self.age})" + + +def f(x: object) -> None: + match x: + case 123: + print("test 1") 
+ + case 456 | 789: + print("test 2") + + case True | False | None: + print("test 3") + + case Person("bob" as name, age): + print(f"test 4 ({name=}, {age=})") + + case num if num == 5: + print("test 5") + + case 6 as num: + print(f"test 6 ({num=})") + + case (7 | "7") as value: + print(f"test 7 ({value=})") + + case Person("alice", age=123): + print("test 8") + + case Person("charlie", age=123 | 456): + print("test 9") + + case Person("dave", 123) as dave: + print(f"test 10 {dave}") + + case {"test": 11}: + print("test 11") + + case {"test": 12, **rest}: + print(f"test 12 (rest={rest})") + + case {}: + print("test map final") + + case ["test", 13]: + print("test 13") + + case ["test", 13, _]: + print("test 13b") + + case ["test", 14, *_]: + print("test 14") + + # TODO: Fix "rest" being used here coliding with above "rest" + case ["test", 15, *rest2]: + print(f"test 15 ({rest2})") + + case ["test", *rest3, 16]: + print(f"test 16 ({rest3})") + + case [*rest4, "test", 17]: + print(f"test 17 ({rest4})") + + case [*rest4, "test", 18, "some", "fluff"]: + print(f"test 18 ({rest4})") + + case str("test 19"): + print("test 19") + + case str(test_20) if test_20.startswith("test 20"): + print(f"test 20 ({test_20[7:]!r})") + + case ("test 21" as value) | ("test 21 as well" as value): + print(f"test 21 ({value[7:]!r})") + + case []: + print("test sequence final") + + case _: + print("test final") +[file driver.py] +from native import f, Person + +# test 1 +f(123) + +# test 2 +f(456) +f(789) + +# test 3 +f(True) +f(False) +f(None) + +# test 4 +f(Person("bob", 123)) + +# test 5 +f(5) + +# test 6 +f(6) + +# test 7 +f(7) +f("7") + +# test 8 +f(Person("alice", 123)) + +# test 9 +f(Person("charlie", 123)) +f(Person("charlie", 456)) + +# test 10 +f(Person("dave", 123)) + +# test 11 +f({"test": 11}) +f({"test": 11, "some": "key"}) + +# test 12 +f({"test": 12}) +f({"test": 12, "key": "value"}) +f({"test": 12, "key": "value", "abc": "123"}) + +# test map final +f({}) + +# test 13 +f(["test", 13]) + +# test 13b +f(["test", 13, "fail"]) + +# test 14 +f(["test", 14]) +f(["test", 14, "something"]) + +# test 15 +f(["test", 15]) +f(["test", 15, "something"]) + +# test 16 +f(["test", 16]) +f(["test", "filler", 16]) +f(["test", "more", "filler", 16]) + +# test 17 +f(["test", 17]) +f(["stuff", "test", 17]) +f(["more", "stuff", "test", 17]) + +# test 18 +f(["test", 18, "some", "fluff"]) +f(["stuff", "test", 18, "some", "fluff"]) +f(["more", "stuff", "test", 18, "some", "fluff"]) + +# test 19 +f("test 19") + +# test 20 +f("test 20") +f("test 20 something else") + +# test 21 +f("test 21") +f("test 21 as well") + +# test sequence final +f([]) + +# test final +f("") + +[out] +test 1 +test 2 +test 2 +test 3 +test 3 +test 3 +test 4 (name='bob', age=123) +test 5 +test 6 (num=6) +test 7 (value=7) +test 7 (value='7') +test 8 +test 9 +test 9 +test 10 Person(name='dave', age=123) +test 11 +test 11 +test 12 (rest={}) +test 12 (rest={'key': 'value'}) +test 12 (rest={'key': 'value', 'abc': '123'}) +test map final +test 13 +test 13b +test 14 +test 14 +test 15 ([]) +test 15 (['something']) +test 16 ([]) +test 16 (['filler']) +test 16 (['more', 'filler']) +test 17 ([]) +test 17 (['stuff']) +test 17 (['more', 'stuff']) +test 18 ([]) +test 18 (['stuff']) +test 18 (['more', 'stuff']) +test 19 +test 20 ('') +test 20 (' something else') +test 21 ('') +test 21 (' as well') +test sequence final +test final +[case testCustomMappingAndSequenceObjects_python3_10] +def f(x: object) -> None: + match x: + case {"key": "value", **rest}: + print(rest, 
type(rest)) + + case [1, 2, *rest2]: + print(rest2, type(rest2)) + +[file driver.py] +from collections.abc import Mapping, Sequence + +from native import f + +class CustomMapping(Mapping): + inner: dict + + def __init__(self, inner: dict) -> None: + self.inner = inner + + def __getitem__(self, key): + return self.inner[key] + + def __iter__(self): + return iter(self.inner) + + def __len__(self) -> int: + return len(self.inner) + + +class CustomSequence(Sequence): + inner: list + + def __init__(self, inner: list) -> None: + self.inner = inner + + def __getitem__(self, index: int) -> None: + return self.inner[index] + + def __len__(self) -> int: + return len(self.inner) + +mapping = CustomMapping({"key": "value", "some": "data"}) +sequence = CustomSequence([1, 2, 3]) + +f(mapping) +f(sequence) + +[out] +{'some': 'data'} +[3] diff --git a/mypyc/test-data/run-misc.test b/mypyc/test-data/run-misc.test index 001e0aa41b25..267a3441808f 100644 --- a/mypyc/test-data/run-misc.test +++ b/mypyc/test-data/run-misc.test @@ -1116,3 +1116,33 @@ i = b"foo" def test_redefinition() -> None: assert i == b"foo" + +[case testWithNative] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 + +[case testWithNativeVarArgs] +class DummyContext: + def __init__(self): + self.c = 0 + def __enter__(self) -> None: + self.c += 1 + def __exit__(self, *args: object) -> None: + self.c -= 1 + +def test_dummy_context() -> None: + c = DummyContext() + with c: + assert c.c == 1 + assert c.c == 0 diff --git a/mypyc/test-data/run-sets.test b/mypyc/test-data/run-sets.test index 98ac92d569b7..56c946933fac 100644 --- a/mypyc/test-data/run-sets.test +++ b/mypyc/test-data/run-sets.test @@ -115,3 +115,36 @@ from native import update s = {1, 2, 3} update(s, [5, 4, 3]) assert s == {1, 2, 3, 4, 5} + +[case testPrecomputedFrozenSets] +from typing import Any +from typing_extensions import Final + +CONST: Final = "CONST" +non_const = "non_const" + +def main_set(item: Any) -> bool: + return item in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def main_negated_set(item: Any) -> bool: + return item not in {None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST} + +def non_final_name_set(item: Any) -> bool: + return item in {non_const} + +s = set() +for i in {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}: + s.add(i) + +def test_in_set() -> None: + for item in (None, False, 1, 2.0, "3", b"4", 5j, (6,), ((7,),), (), CONST): + assert main_set(item), f"{item!r} should be in set_main" + assert not main_negated_set(item), item + + assert non_final_name_set(non_const) + global non_const + non_const = "updated" + assert non_final_name_set("updated") + +def test_for_set() -> None: + assert not s ^ {None, False, 1, 2.0, "3", b"4", 5j, (6,), CONST}, s diff --git a/mypyc/test-data/run-strings.test b/mypyc/test-data/run-strings.test index c2b010bdb2bd..4a20c13ce789 100644 --- a/mypyc/test-data/run-strings.test +++ b/mypyc/test-data/run-strings.test @@ -136,6 +136,9 @@ def is_true(x: str) -> bool: else: return False +def is_true2(x: str) -> bool: + return bool(x) + def is_false(x: str) -> bool: if not x: return True @@ -145,8 +148,10 @@ def is_false(x: str) -> bool: def test_str_to_bool() -> None: assert is_false('') assert not is_true('') + assert not is_true2('') for x in 'a', 'foo', 'bar', 'some string': 
assert is_true(x) + assert is_true2(x) assert not is_false(x) def test_str_min_max() -> None: diff --git a/mypyc/test-data/run-tuples.test b/mypyc/test-data/run-tuples.test index 26b039320844..f6c92b9c720f 100644 --- a/mypyc/test-data/run-tuples.test +++ b/mypyc/test-data/run-tuples.test @@ -99,8 +99,6 @@ assert f(Sub(3, 2)) == 3 [case testNamedTupleClassSyntax] from typing import Dict, List, NamedTuple, Optional, Tuple, Union -class ClassIR: pass - class FuncIR: pass StealsDescription = Union[bool, List[bool]] @@ -119,8 +117,12 @@ class Record(NamedTuple): ordering: Optional[List[int]] extra_int_constants: List[Tuple[int]] +# Make sure mypyc loads the annotation string for this forward reference. +# Ref: https://github.com/mypyc/mypyc/issues/938 +class ClassIR: pass + [file driver.py] -from typing import Optional +from typing import ForwardRef, Optional from native import ClassIR, FuncIR, Record assert Record.__annotations__ == { @@ -129,7 +131,7 @@ assert Record.__annotations__ == { 'is_borrowed': bool, 'hash': str, 'python_path': tuple, - 'type': ClassIR, + 'type': ForwardRef('ClassIR'), 'method': FuncIR, 'shadow_method': type, 'classes': dict, diff --git a/mypyc/test/test_commandline.py b/mypyc/test/test_commandline.py index 1822cf13fe42..f66ca2ec8ff0 100644 --- a/mypyc/test/test_commandline.py +++ b/mypyc/test/test_commandline.py @@ -43,6 +43,9 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: with open(program_path, "w") as f: f.write(text) + env = os.environ.copy() + env["PYTHONPATH"] = base_path + out = b"" try: # Compile program @@ -51,9 +54,15 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd="tmp", + env=env, ) if "ErrorOutput" in testcase.name or cmd.returncode != 0: out += cmd.stdout + elif "WarningOutput" in testcase.name: + # Strip out setuptools build related output since we're only + # interested in the messages emitted during compilation. 
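+                # Everything printed before the "running build_ext" line comes
+                # from the compiler itself, so keep only that part.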
+ messages, _, _ = cmd.stdout.partition(b"running build_ext") + out += messages if cmd.returncode == 0: # Run main program diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py index 5be1e61cba8d..d7dcf3be532b 100644 --- a/mypyc/test/test_emitfunc.py +++ b/mypyc/test/test_emitfunc.py @@ -9,6 +9,7 @@ from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature, RuntimeArg from mypyc.ir.ops import ( + ERR_NEVER, Assign, AssignMulti, BasicBlock, @@ -103,7 +104,13 @@ def add_local(name: str, rtype: RType) -> Register: "tt", RTuple([RTuple([int_rprimitive, bool_rprimitive]), bool_rprimitive]) ) ir = ClassIR("A", "mod") - ir.attributes = {"x": bool_rprimitive, "y": int_rprimitive} + ir.attributes = { + "x": bool_rprimitive, + "y": int_rprimitive, + "i1": int64_rprimitive, + "i2": int32_rprimitive, + } + ir.bitmap_attrs = ["i1", "i2"] compute_vtable(ir) ir.mro = [ir] self.r = add_local("r", RInstance(ir)) @@ -397,6 +404,16 @@ def test_get_attr_merged(self) -> None: skip_next=True, ) + def test_get_attr_with_bitmap(self) -> None: + self.assert_emit( + GetAttr(self.r, "i1", 1), + """cpy_r_r0 = ((mod___AObject *)cpy_r_r)->_i1; + if (unlikely(cpy_r_r0 == -113) && !(((mod___AObject *)cpy_r_r)->bitmap & 1)) { + PyErr_SetString(PyExc_AttributeError, "attribute 'i1' of 'A' undefined"); + } + """, + ) + def test_set_attr(self) -> None: self.assert_emit( SetAttr(self.r, "y", self.m, 1), @@ -416,6 +433,62 @@ def test_set_attr_non_refcounted(self) -> None: """, ) + def test_set_attr_no_error(self) -> None: + op = SetAttr(self.r, "y", self.m, 1) + op.error_kind = ERR_NEVER + self.assert_emit( + op, + """if (((mod___AObject *)cpy_r_r)->_y != CPY_INT_TAG) { + CPyTagged_DECREF(((mod___AObject *)cpy_r_r)->_y); + } + ((mod___AObject *)cpy_r_r)->_y = cpy_r_m; + """, + ) + + def test_set_attr_non_refcounted_no_error(self) -> None: + op = SetAttr(self.r, "x", self.b, 1) + op.error_kind = ERR_NEVER + self.assert_emit( + op, + """((mod___AObject *)cpy_r_r)->_x = cpy_r_b; + """, + ) + + def test_set_attr_with_bitmap(self) -> None: + # For some rtypes the error value overlaps a valid value, so we need + # to use a separate bitmap to track defined attributes. 
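+        # For example, i64/i32 attributes use -113 as the error sentinel (see
+        # the "== -113" checks below), but -113 is also a valid value, so one
+        # bitmap bit per such attribute records whether it is actually defined.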
+ self.assert_emit( + SetAttr(self.r, "i1", self.i64, 1), + """if (unlikely(cpy_r_i64 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 1; + } + ((mod___AObject *)cpy_r_r)->_i1 = cpy_r_i64; + cpy_r_r0 = 1; + """, + ) + self.assert_emit( + SetAttr(self.r, "i2", self.i32, 1), + """if (unlikely(cpy_r_i32 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 2; + } + ((mod___AObject *)cpy_r_r)->_i2 = cpy_r_i32; + cpy_r_r0 = 1; + """, + ) + + def test_set_attr_init_with_bitmap(self) -> None: + op = SetAttr(self.r, "i1", self.i64, 1) + op.is_init = True + self.assert_emit( + op, + """if (unlikely(cpy_r_i64 == -113)) { + ((mod___AObject *)cpy_r_r)->bitmap |= 1; + } + ((mod___AObject *)cpy_r_r)->_i1 = cpy_r_i64; + cpy_r_r0 = 1; + """, + ) + def test_dict_get_item(self) -> None: self.assert_emit( CallC( diff --git a/mypyc/test/test_irbuild.py b/mypyc/test/test_irbuild.py index ba8014116e8a..cb5e690eed55 100644 --- a/mypyc/test/test_irbuild.py +++ b/mypyc/test/test_irbuild.py @@ -3,6 +3,7 @@ from __future__ import annotations import os.path +import sys from mypy.errors import CompileError from mypy.test.config import test_temp_dir @@ -23,6 +24,7 @@ files = [ "irbuild-basic.test", "irbuild-int.test", + "irbuild-bool.test", "irbuild-lists.test", "irbuild-tuple.test", "irbuild-dict.test", @@ -37,14 +39,20 @@ "irbuild-generics.test", "irbuild-try.test", "irbuild-strip-asserts.test", + "irbuild-i64.test", + "irbuild-i32.test", "irbuild-vectorcall.test", "irbuild-unreachable.test", "irbuild-isinstance.test", "irbuild-dunders.test", "irbuild-singledispatch.test", "irbuild-constant-fold.test", + "irbuild-glue-methods.test", ] +if sys.version_info >= (3, 10): + files.append("irbuild-match.test") + class TestGenOps(MypycDataSuite): files = files diff --git a/mypyc/test/test_ircheck.py b/mypyc/test/test_ircheck.py index 30ddd39fef0d..008963642272 100644 --- a/mypyc/test/test_ircheck.py +++ b/mypyc/test/test_ircheck.py @@ -5,7 +5,17 @@ from mypyc.analysis.ircheck import FnError, can_coerce_to, check_func_ir from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FuncDecl, FuncIR, FuncSignature -from mypyc.ir.ops import Assign, BasicBlock, Goto, Integer, LoadLiteral, Op, Register, Return +from mypyc.ir.ops import ( + Assign, + BasicBlock, + Goto, + Integer, + LoadAddress, + LoadLiteral, + Op, + Register, + Return, +) from mypyc.ir.pprint import format_func from mypyc.ir.rtypes import ( RInstance, @@ -16,6 +26,7 @@ int64_rprimitive, none_rprimitive, object_rprimitive, + pointer_rprimitive, str_rprimitive, ) @@ -88,7 +99,7 @@ def test_invalid_register_source(self) -> None: ret = Return(value=Register(type=none_rprimitive, name="r1")) block = self.basic_block([ret]) fn = FuncIR(decl=self.func_decl(name="func_1"), arg_regs=[], blocks=[block]) - assert_has_error(fn, FnError(source=ret, desc="Invalid op reference to register r1")) + assert_has_error(fn, FnError(source=ret, desc="Invalid op reference to register 'r1'")) def test_invalid_op_source(self) -> None: ret = Return(value=LoadLiteral(value="foo", rtype=str_rprimitive)) @@ -170,3 +181,19 @@ def test_pprint(self) -> None: " goto L1", " ERR: Invalid control operation target: 1", ] + + def test_load_address_declares_register(self) -> None: + rx = Register(str_rprimitive, "x") + ry = Register(pointer_rprimitive, "y") + load_addr = LoadAddress(pointer_rprimitive, rx) + assert_no_errors( + FuncIR( + decl=self.func_decl(name="func_1"), + arg_regs=[], + blocks=[ + self.basic_block( + ops=[load_addr, Assign(ry, load_addr), Return(value=NONE_VALUE)] + ) + ], + 
) + ) diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index 62168ff4bb00..6a5ab87fca49 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -10,11 +10,12 @@ import shutil import subprocess import sys +import time from typing import Any, Iterator, cast from mypy import build from mypy.errors import CompileError -from mypy.options import Options +from mypy.options import TYPE_VAR_TUPLE, UNPACK, Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase from mypy.test.helpers import assert_module_equivalence, perform_file_operations @@ -38,6 +39,8 @@ "run-misc.test", "run-functions.test", "run-integers.test", + "run-i64.test", + "run-i32.test", "run-floats.test", "run-bools.test", "run-strings.test", @@ -65,6 +68,9 @@ if sys.version_info >= (3, 8): files.append("run-python38.test") +if sys.version_info >= (3, 10): + files.append("run-match.test") + setup_format = """\ from setuptools import setup from mypyc.build import mypycify @@ -167,6 +173,12 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None: # new by distutils, shift the mtime of all of the # generated artifacts back by a second. fudge_dir_mtimes(WORKDIR, -1) + # On Ubuntu, changing the mtime doesn't work reliably. As + # a workaround, sleep. + # + # TODO: Figure out a better approach, since this slows down tests. + if sys.platform == "linux": + time.sleep(1.0) step += 1 with chdir_manager(".."): @@ -183,7 +195,9 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> options.python_version = sys.version_info[:2] options.export_types = True options.preserve_asts = True + options.allow_empty_bodies = True options.incremental = self.separate + options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] # Avoid checking modules/packages named 'unchecked', to provide a way # to test interacting with code we don't have types for. @@ -241,7 +255,7 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> assert False, "Compile error" # Check that serialization works on this IR. (Only on the first - # step because the the returned ir only includes updated code.) + # step because the returned ir only includes updated code.) 
if incremental_step == 1: check_serialization_roundtrip(ir) @@ -298,6 +312,9 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> stderr=subprocess.STDOUT, env=env, ) + if sys.version_info >= (3, 12): + # TODO: testDecorators1 hangs on 3.12, remove this once fixed + proc.wait(timeout=30) output = proc.communicate()[0].decode("utf8") outlines = output.splitlines() diff --git a/mypyc/test/test_subtype.py b/mypyc/test/test_typeops.py similarity index 64% rename from mypyc/test/test_subtype.py rename to mypyc/test/test_typeops.py index 4a0d8737c852..f414edd1a2bb 100644 --- a/mypyc/test/test_subtype.py +++ b/mypyc/test/test_typeops.py @@ -1,16 +1,19 @@ -"""Test cases for is_subtype and is_runtime_subtype.""" +"""Test cases for various RType operations.""" from __future__ import annotations import unittest from mypyc.ir.rtypes import ( + RUnion, bit_rprimitive, bool_rprimitive, int32_rprimitive, int64_rprimitive, int_rprimitive, + object_rprimitive, short_int_rprimitive, + str_rprimitive, ) from mypyc.rt_subtype import is_runtime_subtype from mypyc.subtype import is_subtype @@ -50,3 +53,24 @@ def test_bit(self) -> None: def test_bool(self) -> None: assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) + + +class TestUnionSimplification(unittest.TestCase): + def test_simple_type_result(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive]) == int_rprimitive + + def test_remove_duplicate(self) -> None: + assert RUnion.make_simplified_union([int_rprimitive, int_rprimitive]) == int_rprimitive + + def test_cannot_simplify(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, str_rprimitive, object_rprimitive] + ) == RUnion([int_rprimitive, str_rprimitive, object_rprimitive]) + + def test_nested(self) -> None: + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, int_rprimitive])] + ) == RUnion([int_rprimitive, str_rprimitive]) + assert RUnion.make_simplified_union( + [int_rprimitive, RUnion([str_rprimitive, RUnion([int_rprimitive])])] + ) == RUnion([int_rprimitive, str_rprimitive]) diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index dc771b00551d..609ffc27385e 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -105,11 +105,13 @@ def build_ir_for_single_file2( compiler_options = compiler_options or CompilerOptions(capi_version=(3, 5)) options = Options() options.show_traceback = True + options.hide_error_codes = True options.use_builtins_fixtures = True options.strict_optional = True - options.python_version = (3, 6) + options.python_version = compiler_options.python_version or (3, 6) options.export_types = True options.preserve_asts = True + options.allow_empty_bodies = True options.per_module_options["__main__"] = {"mypyc": True} source = build.BuildSource("main", "__main__", program_text) @@ -275,6 +277,7 @@ def infer_ir_build_options_from_test_name(name: str) -> CompilerOptions | None: m = re.search(r"_python([3-9]+)_([0-9]+)(_|\b)", name) if m: options.capi_version = (int(m.group(1)), int(m.group(2))) + options.python_version = options.capi_version elif "_py" in name or "_Python" in name: assert False, f"Invalid _py* suffix (should be _pythonX_Y): {name}" return options diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py index 3cfe6e5d3bd5..2851955ff38f 100644 --- a/mypyc/transform/exceptions.py +++ b/mypyc/transform/exceptions.py @@ -23,13 +23,17 @@ Branch, CallC, ComparisonOp, + 
GetAttr, Integer, LoadErrorValue, + Op, RegisterOp, Return, + SetAttr, + TupleGet, Value, ) -from mypyc.ir.rtypes import bool_rprimitive +from mypyc.ir.rtypes import RTuple, bool_rprimitive from mypyc.primitives.exc_ops import err_occurred_op from mypyc.primitives.registry import CFunctionDescription @@ -40,6 +44,7 @@ def insert_exception_handling(ir: FuncIR) -> None: # block. The block just returns an error value. error_label = None for block in ir.blocks: + adjust_error_kinds(block) can_raise = any(op.can_raise() for op in block.ops) if can_raise: error_label = add_handler_block(ir) @@ -97,13 +102,15 @@ def split_blocks_at_errors( # semantics, using a temporary bool with value false target = Integer(0, bool_rprimitive) elif op.error_kind == ERR_MAGIC_OVERLAPPING: - errvalue = Integer(int(target.type.c_undefined), rtype=op.type) - comp = ComparisonOp(target, errvalue, ComparisonOp.EQ) - cur_block.ops.append(comp) + comp = insert_overlapping_error_value_check(cur_block.ops, target) new_block2 = BasicBlock() new_blocks.append(new_block2) branch = Branch( - comp, true_label=new_block2, false_label=new_block, op=Branch.BOOL + comp, + true_label=new_block2, + false_label=new_block, + op=Branch.BOOL, + rare=True, ) cur_block.ops.append(branch) cur_block = new_block2 @@ -141,3 +148,32 @@ def primitive_call(desc: CFunctionDescription, args: list[Value], line: int) -> desc.error_kind, line, ) + + +def adjust_error_kinds(block: BasicBlock) -> None: + """Infer more precise error_kind attributes for ops. + + We have access here to more information than what was available + when the IR was initially built. + """ + for op in block.ops: + if isinstance(op, GetAttr): + if op.class_type.class_ir.is_always_defined(op.attr): + op.error_kind = ERR_NEVER + if isinstance(op, SetAttr): + if op.class_type.class_ir.is_always_defined(op.attr): + op.error_kind = ERR_NEVER + + +def insert_overlapping_error_value_check(ops: list[Op], target: Value) -> ComparisonOp: + """Append to ops to check for an overlapping error value.""" + typ = target.type + if isinstance(typ, RTuple): + item = TupleGet(target, 0) + ops.append(item) + return insert_overlapping_error_value_check(ops, item) + else: + errvalue = Integer(int(typ.c_undefined), rtype=typ) + op = ComparisonOp(target, errvalue, ComparisonOp.EQ) + ops.append(op) + return op diff --git a/mypyc/transform/uninit.py b/mypyc/transform/uninit.py index 0fa65be90295..6bf71ac4a8bc 100644 --- a/mypyc/transform/uninit.py +++ b/mypyc/transform/uninit.py @@ -3,11 +3,15 @@ from __future__ import annotations from mypyc.analysis.dataflow import AnalysisDict, analyze_must_defined_regs, cleanup_cfg, get_cfg +from mypyc.common import BITMAP_BITS from mypyc.ir.func_ir import FuncIR, all_values from mypyc.ir.ops import ( Assign, BasicBlock, Branch, + ComparisonOp, + Integer, + IntOp, LoadAddress, LoadErrorValue, Op, @@ -16,6 +20,7 @@ Unreachable, Value, ) +from mypyc.ir.rtypes import bitmap_rprimitive def insert_uninit_checks(ir: FuncIR) -> None: @@ -38,6 +43,8 @@ def split_blocks_at_uninits( init_registers = [] init_registers_set = set() + bitmap_registers: list[Register] = [] # Init status bitmaps + bitmap_backed: list[Register] = [] # These use bitmaps to track init status # First split blocks on ops that may raise. 
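Note on the two error-tracking mechanisms touched by the hunks above: native int types such as i64/i32 have *overlapping* error values, meaning every bit pattern is also a legitimate result, so the `c_undefined` sentinel (-113 in the emit tests earlier in this diff) cannot by itself distinguish "undefined" from a real value. `insert_overlapping_error_value_check` therefore compares against the sentinel (recursing into tuple items for `RTuple`), and for possibly-uninitialized locals the `uninit` transform keeps per-function bitmap registers: local number `i` uses bit `i mod BITMAP_BITS` of bitmap register `i // BITMAP_BITS`, with the bit ORed in on assignment and tested before use. The sketch below is an illustrative pure-Python model of that bit arithmetic only, not mypyc IR; `BITMAP_BITS = 32` and the `LocalBitmaps` helper names are assumptions for the example (mypyc defines the real constant in `mypyc.common`).

```python
from __future__ import annotations

# Illustrative model only -- not mypyc IR or API.
BITMAP_BITS = 32   # assumed width of one bitmap register
SENTINEL = -113    # the overlapping c_undefined value seen in the emit tests


class LocalBitmaps:
    """Track definedness of locals whose types have overlapping error values."""

    def __init__(self) -> None:
        self.registers: list[int] = []  # one int per BITMAP_BITS locals, all bits start clear

    def _ensure(self, index: int) -> None:
        while len(self.registers) <= index // BITMAP_BITS:
            self.registers.append(0)  # 0 == "not yet assigned"

    def mark_assigned(self, index: int) -> None:
        # Mirrors the IntOp.OR inserted after each assignment in the transform.
        self._ensure(index)
        self.registers[index // BITMAP_BITS] |= 1 << (index & (BITMAP_BITS - 1))

    def is_assigned(self, index: int) -> bool:
        # Mirrors the IntOp.AND + ComparisonOp.EQ check before a use.
        self._ensure(index)
        bit = 1 << (index & (BITMAP_BITS - 1))
        return bool(self.registers[index // BITMAP_BITS] & bit)


# Local #37 lands in register 37 // 32 == 1, bit 37 & 31 == 5.
bitmaps = LocalBitmaps()
bitmaps.mark_assigned(37)
assert bitmaps.is_assigned(37) and not bitmaps.is_assigned(0)

value = SENTINEL              # a perfectly legal i64 result
assert bitmaps.is_assigned(37)  # bit is set, so no UnboundLocalError branch is taken
```

The per-instance `bitmap |= 1` in the `SetAttr` emit tests near the top of this section is the attribute-level flavor of the same idea: when a stored i64 happens to equal the sentinel, a bit in the object's bitmap records that the attribute is nonetheless defined.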
for block in blocks: @@ -70,15 +77,28 @@ def split_blocks_at_uninits( init_registers.append(src) init_registers_set.add(src) - cur_block.ops.append( - Branch( + if not src.type.error_overlap: + cur_block.ops.append( + Branch( + src, + true_label=error_block, + false_label=new_block, + op=Branch.IS_ERROR, + line=op.line, + ) + ) + else: + # We need to use bitmap for this one. + check_for_uninit_using_bitmap( + cur_block.ops, src, - true_label=error_block, - false_label=new_block, - op=Branch.IS_ERROR, - line=op.line, + bitmap_registers, + bitmap_backed, + error_block, + new_block, + op.line, ) - ) + raise_std = RaiseStandardError( RaiseStandardError.UNBOUND_LOCAL_ERROR, f'local variable "{src.name}" referenced before assignment', @@ -89,12 +109,82 @@ def split_blocks_at_uninits( cur_block = new_block cur_block.ops.append(op) + if bitmap_backed: + update_register_assignments_to_set_bitmap(new_blocks, bitmap_registers, bitmap_backed) + if init_registers: new_ops: list[Op] = [] for reg in init_registers: err = LoadErrorValue(reg.type, undefines=True) new_ops.append(err) new_ops.append(Assign(reg, err)) + for reg in bitmap_registers: + new_ops.append(Assign(reg, Integer(0, bitmap_rprimitive))) new_blocks[0].ops[0:0] = new_ops return new_blocks + + +def check_for_uninit_using_bitmap( + ops: list[Op], + src: Register, + bitmap_registers: list[Register], + bitmap_backed: list[Register], + error_block: BasicBlock, + ok_block: BasicBlock, + line: int, +) -> None: + """Check if src is defined using a bitmap. + + Modifies ops, bitmap_registers and bitmap_backed. + """ + if src not in bitmap_backed: + # Set up a new bitmap backed register. + bitmap_backed.append(src) + n = (len(bitmap_backed) - 1) // BITMAP_BITS + if len(bitmap_registers) <= n: + bitmap_registers.append(Register(bitmap_rprimitive, f"__locals_bitmap{n}")) + + index = bitmap_backed.index(src) + masked = IntOp( + bitmap_rprimitive, + bitmap_registers[index // BITMAP_BITS], + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.AND, + line, + ) + ops.append(masked) + chk = ComparisonOp(masked, Integer(0, bitmap_rprimitive), ComparisonOp.EQ) + ops.append(chk) + ops.append(Branch(chk, error_block, ok_block, Branch.BOOL)) + + +def update_register_assignments_to_set_bitmap( + blocks: list[BasicBlock], bitmap_registers: list[Register], bitmap_backed: list[Register] +) -> None: + """Update some assignments to registers to also set a bit in a bitmap. + + The bitmaps are used to track if a local variable has been assigned to. + + Modifies blocks. 
+ """ + for block in blocks: + if any(isinstance(op, Assign) and op.dest in bitmap_backed for op in block.ops): + new_ops: list[Op] = [] + for op in block.ops: + if isinstance(op, Assign) and op.dest in bitmap_backed: + index = bitmap_backed.index(op.dest) + new_ops.append(op) + reg = bitmap_registers[index // BITMAP_BITS] + new = IntOp( + bitmap_rprimitive, + reg, + Integer(1 << (index & (BITMAP_BITS - 1)), bitmap_rprimitive), + IntOp.OR, + op.line, + ) + new_ops.append(new) + new_ops.append(Assign(reg, new)) + else: + new_ops.append(op) + block.ops = new_ops diff --git a/pyproject.toml b/pyproject.toml index 95f65599a130..328b9bf159a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,15 +1,27 @@ [build-system] requires = [ + # NOTE: this needs to be kept in sync with mypy-requirements.txt + # and build-requirements.txt, because those are both needed for + # self-typechecking :/ "setuptools >= 40.6.2", "wheel >= 0.30.0", + # the following is from mypy-requirements.txt + "typing_extensions>=3.10", + "mypy_extensions>=1.0.0", + "typed_ast>=1.4.0,<2; python_version<'3.8'", + "tomli>=1.1.0; python_version<'3.11'", + # the following is from build-requirements.txt + "types-psutil", + "types-setuptools", + "types-typed-ast>=1.5.8,<1.6.0", ] build-backend = "setuptools.build_meta" [tool.black] line-length = 99 -target-version = ['py37'] +target-version = ["py37", "py38", "py39", "py310", "py311"] skip-magic-trailing-comma = true -extend-exclude = ''' +force-exclude = ''' ^/mypy/typeshed| ^/mypyc/test-data| ^/test-data @@ -21,8 +33,8 @@ line_length = 99 combine_as_imports = true skip_gitignore = true extra_standard_library = ["typing_extensions"] -skip = [ - "mypy/typeshed", - "mypyc/test-data", - "test-data", +skip_glob = [ + "mypy/typeshed/*", + "mypyc/test-data/*", + "test-data/*", ] diff --git a/runtests.py b/runtests.py index c41f1db7e40f..ade0a8adee5e 100755 --- a/runtests.py +++ b/runtests.py @@ -8,8 +8,6 @@ # Slow test suites CMDLINE = "PythonCmdline" -SAMPLES = "SamplesSuite" -TYPESHED = "TypeshedSuite" PEP561 = "PEP561Suite" EVALUATION = "PythonEvaluation" DAEMON = "testdaemon" @@ -24,8 +22,6 @@ ALL_NON_FAST = [ CMDLINE, - SAMPLES, - TYPESHED, PEP561, EVALUATION, DAEMON, @@ -54,7 +50,7 @@ # Self type check "self": [executable, "-m", "mypy", "--config-file", "mypy_self_check.ini", "-p", "mypy"], # Lint - "lint": ["flake8", "-j0"], + "lint": ["flake8", "-j3"], "format-black": ["black", "."], "format-isort": ["isort", "."], # Fast test cases only (this is the bulk of the test suite) @@ -71,12 +67,10 @@ "pytest", "-q", "-k", - " or ".join([SAMPLES, TYPESHED, DAEMON, MYPYC_EXTERNAL, MYPYC_COMMAND_LINE, ERROR_STREAM]), + " or ".join([DAEMON, MYPYC_EXTERNAL, MYPYC_COMMAND_LINE, ERROR_STREAM]), ], # Test cases that might take minutes to run "pytest-extra": ["pytest", "-q", "-k", " or ".join(PYTEST_OPT_IN)], - # Test cases to run in typeshed CI - "typeshed-ci": ["pytest", "-q", "-k", " or ".join([CMDLINE, EVALUATION, SAMPLES, TYPESHED])], # Mypyc tests that aren't run by default, since they are slow and rarely # fail for commits that don't touch mypyc "mypyc-extra": ["pytest", "-q", "-k", " or ".join(MYPYC_OPT_IN)], @@ -85,7 +79,7 @@ # Stop run immediately if these commands fail FAST_FAIL = ["self", "lint"] -EXTRA_COMMANDS = ("pytest-extra", "mypyc-extra", "typeshed-ci") +EXTRA_COMMANDS = ("pytest-extra", "mypyc-extra") DEFAULT_COMMANDS = [cmd for cmd in cmds if cmd not in EXTRA_COMMANDS] assert all(cmd in cmds for cmd in FAST_FAIL) diff --git a/setup.cfg b/setup.cfg index 
326b5bb53e96..511f794474e7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,10 +19,6 @@ exclude = # Sphinx configuration is irrelevant docs/source/conf.py, mypyc/doc/conf.py, - # conflicting styles - misc/*, - # conflicting styles - scripts/*, # tests have more relaxed styling requirements # fixtures have their own .pyi-specific configuration test-data/*, @@ -42,9 +38,8 @@ exclude = # B007: Loop control variable not used within the loop body. # B011: Don't use assert False # B023: Function definition does not bind loop variable -# F821: Name not defined (generates false positives with error codes) # E741: Ambiguous variable name -extend-ignore = E203,E501,W601,E402,B006,B007,B011,B023,F821,E741 +extend-ignore = E203,E501,W601,E402,B006,B007,B011,B023,E741 [coverage:run] branch = true diff --git a/setup.py b/setup.py index a8c86ff663a3..5d5ea06fb714 100644 --- a/setup.py +++ b/setup.py @@ -6,6 +6,7 @@ import os import os.path import sys +from typing import TYPE_CHECKING, Any if sys.version_info < (3, 7, 0): sys.stderr.write("ERROR: You need Python 3.7 or later to use mypy.\n") @@ -17,11 +18,14 @@ # This requires setuptools when building; setuptools is not needed # when installing from a wheel file (though it is still needed for # alternative forms of installing, as suggested by README.md). -from setuptools import find_packages, setup +from setuptools import Extension, find_packages, setup from setuptools.command.build_py import build_py from mypy.version import __version__ as version +if TYPE_CHECKING: + from typing_extensions import TypeGuard + description = "Optional static typing for Python" long_description = """ Mypy -- Optional Static Typing for Python @@ -36,6 +40,10 @@ """.lstrip() +def is_list_of_setuptools_extension(items: list[Any]) -> TypeGuard[list[Extension]]: + return all(isinstance(item, Extension) for item in items) + + def find_package_data(base, globs, root="mypy"): """Find all interesting data files, for setup(package_data=) @@ -79,8 +87,8 @@ def run(self): USE_MYPYC = False # To compile with mypyc, a mypyc checkout must be present on the PYTHONPATH -if len(sys.argv) > 1 and sys.argv[1] == "--use-mypyc": - sys.argv.pop(1) +if len(sys.argv) > 1 and "--use-mypyc" in sys.argv: + sys.argv.remove("--use-mypyc") USE_MYPYC = True if os.getenv("MYPY_USE_MYPYC", None) == "1": USE_MYPYC = True @@ -166,12 +174,14 @@ def run(self): # our Appveyor builds run out of memory sometimes. multi_file=sys.platform == "win32" or force_multifile, ) + assert is_list_of_setuptools_extension(ext_modules), "Expected mypycify to use setuptools" + else: ext_modules = [] classifiers = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", @@ -180,6 +190,7 @@ def run(self): "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Topic :: Software Development", "Typing :: Typed", ] @@ -212,7 +223,7 @@ def run(self): install_requires=[ "typed_ast >= 1.4.0, < 2; python_version<'3.8'", "typing_extensions>=3.10", - "mypy_extensions >= 0.4.3", + "mypy_extensions >= 1.0.0", "tomli>=1.1.0; python_version<'3.11'", ], # Same here. 
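The `setup.py` hunks above wrap the result of `mypycify()` in a `TypeGuard`-based check so that the following `setup(ext_modules=...)` call type-checks as a list of `Extension` objects while `typing_extensions` stays a type-checking-only import. Below is a minimal, self-contained sketch of that narrowing pattern; the `Extension` class, `is_list_of_extensions`, and `build` are stand-ins for this example, not the setuptools API, and the pattern assumes `from __future__ import annotations` (present in mypy's setup.py but not shown in these hunks).

```python
from __future__ import annotations

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Only needed by type checkers; never imported at runtime.
    from typing_extensions import TypeGuard


class Extension:  # stand-in for setuptools.Extension
    def __init__(self, name: str) -> None:
        self.name = name


def is_list_of_extensions(items: list[Any]) -> TypeGuard[list[Extension]]:
    # True only if every element is an Extension; checkers then narrow the list type.
    return all(isinstance(item, Extension) for item in items)


def build(modules: list[Extension]) -> None:
    for mod in modules:
        print("building", mod.name)


produced: list[Any] = [Extension("mypy.checker")]  # e.g. output of a codegen step
assert is_list_of_extensions(produced), "expected only Extension objects"
build(produced)  # after the assert, `produced` is treated as list[Extension]
```

Because the annotations are never evaluated at runtime, the helper costs one `all()`/`isinstance` pass at build time while giving the self-type-check a precise `list[Extension]` instead of an untyped list.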
@@ -220,6 +231,7 @@ def run(self): "dmypy": "psutil >= 4.0", "python2": "typed_ast >= 1.4.0, < 2", "reports": "lxml", + "install-types": "pip", }, python_requires=">=3.7", include_package_data=True, diff --git a/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c b/test-data/packages/modulefinder/nsx-pkg3/nsx/c/c new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/packages/modulefinder/pkg1/a b/test-data/packages/modulefinder/pkg1/a new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test-data/unit/README.md b/test-data/unit/README.md index 39ab918faddb..6cf0b1bb26cf 100644 --- a/test-data/unit/README.md +++ b/test-data/unit/README.md @@ -176,6 +176,10 @@ full builtins and library stubs instead of minimal ones. Run them using Note that running more processes than logical cores is likely to significantly decrease performance. +To run tests with coverage: + + python3 -m pytest --cov mypy --cov-config setup.cfg --cov-report=term-missing:skip-covered --cov-report=html + Debugging --------- diff --git a/test-data/unit/check-abstract.test b/test-data/unit/check-abstract.test index e384cb89120b..566bb92d6e18 100644 --- a/test-data/unit/check-abstract.test +++ b/test-data/unit/check-abstract.test @@ -102,16 +102,16 @@ class B(A, I): pass from abc import abstractmethod, ABCMeta +class I(metaclass=ABCMeta): + @abstractmethod + def f(self): pass + o = None # type: object t = None # type: type o = I t = I -class I(metaclass=ABCMeta): - @abstractmethod - def f(self): pass - [case testAbstractClassInCasts] from typing import cast from abc import abstractmethod, ABCMeta @@ -314,8 +314,8 @@ class B(A): # E: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" \ # N: This violates the Liskov substitution principle \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - pass - def g(self, x: int) -> int: pass + return 0 + def g(self, x: int) -> int: return 0 [out] [case testImplementingAbstractMethodWithMultipleBaseClasses] @@ -328,13 +328,13 @@ class J(metaclass=ABCMeta): @abstractmethod def g(self, x: str) -> str: pass class A(I, J): - def f(self, x: str) -> int: pass \ + def f(self, x: str) -> int: return 0 \ # E: Argument 1 of "f" is incompatible with supertype "I"; supertype defines the argument type as "int" \ # N: This violates the Liskov substitution principle \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides - def g(self, x: str) -> int: pass \ + def g(self, x: str) -> int: return 0 \ # E: Return type "int" of "g" incompatible with return type "str" in supertype "J" - def h(self) -> int: pass # Not related to any base class + def h(self) -> int: return 0 # Not related to any base class [out] [case testImplementingAbstractMethodWithExtension] @@ -345,7 +345,7 @@ class J(metaclass=ABCMeta): def f(self, x: int) -> int: pass class I(J): pass class A(I): - def f(self, x: str) -> int: pass \ + def f(self, x: str) -> int: return 0 \ # E: Argument 1 of "f" is incompatible with supertype "J"; supertype defines the argument type as "int" \ # N: This violates the Liskov substitution principle \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides @@ -376,16 +376,16 @@ class I(metaclass=ABCMeta): def h(self, a: 'I') -> A: pass class A(I): def h(self, a: 'A') -> 'I': # Fail - pass + return A() def f(self, a: 'I') -> 'I': - pass + return A() def g(self, a: 'A') -> 'A': - pass + return A() [out] +main:11: error: Return 
type "I" of "h" incompatible with return type "A" in supertype "I" main:11: error: Argument 1 of "h" is incompatible with supertype "I"; supertype defines the argument type as "I" main:11: note: This violates the Liskov substitution principle main:11: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides -main:11: error: Return type "I" of "h" incompatible with return type "A" in supertype "I" -- Accessing abstract members @@ -672,7 +672,7 @@ class A(metaclass=ABCMeta): def __gt__(self, other: 'A') -> int: pass [case testAbstractOperatorMethods2] -import typing +from typing import cast, Any from abc import abstractmethod, ABCMeta class A(metaclass=ABCMeta): @abstractmethod @@ -681,7 +681,8 @@ class B: @abstractmethod def __add__(self, other: 'A') -> int: pass class C: - def __add__(self, other: int) -> B: pass + def __add__(self, other: int) -> B: + return cast(Any, None) [out] [case testAbstractClassWithAnyBase] @@ -761,7 +762,7 @@ class A(metaclass=ABCMeta): def x(self) -> int: pass class B(A): @property - def x(self) -> int: pass + def x(self) -> int: return 0 b = B() b.x() # E: "int" not callable [builtins fixtures/property.pyi] @@ -775,7 +776,7 @@ class A(metaclass=ABCMeta): def x(self, v: int) -> None: pass class B(A): @property - def x(self) -> int: pass + def x(self) -> int: return 0 @x.setter def x(self, v: int) -> None: pass b = B() @@ -789,7 +790,7 @@ class A(metaclass=ABCMeta): def x(self) -> int: pass class B(A): @property - def x(self) -> str: pass # E: Return type "str" of "x" incompatible with return type "int" in supertype "A" + def x(self) -> str: return "no" # E: Signature of "x" incompatible with supertype "A" b = B() b.x() # E: "str" not callable [builtins fixtures/property.pyi] @@ -850,7 +851,7 @@ class A(metaclass=ABCMeta): def x(self, v: int) -> None: pass class B(A): @property # E - def x(self) -> int: pass + def x(self) -> int: return 0 b = B() b.x.y # E [builtins fixtures/property.pyi] @@ -906,7 +907,7 @@ class C(Mixin, A): class A: @property def foo(cls) -> str: - pass + return "yes" class Mixin: foo = "foo" class C(Mixin, A): @@ -922,7 +923,7 @@ class Y(X): class A: @property def foo(cls) -> X: - pass + return X() class Mixin: foo = Y() class C(Mixin, A): @@ -934,7 +935,7 @@ class C(Mixin, A): class A: @property def foo(cls) -> str: - pass + return "no" class Mixin: foo = "foo" class C(A, Mixin): # E: Definition of "foo" in base class "A" is incompatible with definition in base class "Mixin" @@ -1024,7 +1025,7 @@ from abc import abstractmethod, ABCMeta from typing import Type, TypeVar T = TypeVar("T") -def deco(cls: Type[T]) -> Type[T]: ... +def deco(cls: Type[T]) -> Type[T]: return cls @deco class A(metaclass=ABCMeta): @@ -1050,3 +1051,579 @@ b: B b.x = 1 # E: Property "x" defined in "B" is read-only b.y = 1 [builtins fixtures/property.pyi] + + +-- Treatment of empty bodies in ABCs and protocols +-- ----------------------------------------------- + +[case testEmptyBodyProhibitedFunction] +# flags: --strict-optional +from typing import overload, Union + +def func1(x: str) -> int: pass # E: Missing return statement +def func2(x: str) -> int: ... # E: Missing return statement +def func3(x: str) -> int: # E: Missing return statement + """Some function.""" + +@overload +def func4(x: int) -> int: ... +@overload +def func4(x: str) -> str: ... +def func4(x: Union[int, str]) -> Union[int, str]: # E: Missing return statement + pass + +@overload +def func5(x: int) -> int: ... +@overload +def func5(x: str) -> str: ... 
+def func5(x: Union[int, str]) -> Union[int, str]: # E: Missing return statement + """Some function.""" + +[case testEmptyBodyProhibitedMethodNonAbstract] +# flags: --strict-optional +from typing import overload, Union + +class A: + def func1(self, x: str) -> int: pass # E: Missing return statement + def func2(self, x: str) -> int: ... # E: Missing return statement + def func3(self, x: str) -> int: # E: Missing return statement + """Some function.""" + +class B: + @classmethod + def func1(cls, x: str) -> int: pass # E: Missing return statement + @classmethod + def func2(cls, x: str) -> int: ... # E: Missing return statement + @classmethod + def func3(cls, x: str) -> int: # E: Missing return statement + """Some function.""" + +class C: + @overload + def func4(self, x: int) -> int: ... + @overload + def func4(self, x: str) -> str: ... + def func4(self, x: Union[int, str]) -> Union[int, str]: # E: Missing return statement + pass + + @overload + def func5(self, x: int) -> int: ... + @overload + def func5(self, x: str) -> str: ... + def func5(self, x: Union[int, str]) -> Union[int, str]: # E: Missing return statement + """Some function.""" +[builtins fixtures/classmethod.pyi] + +[case testEmptyBodyProhibitedPropertyNonAbstract] +# flags: --strict-optional +class A: + @property + def x(self) -> int: ... # E: Missing return statement + @property + def y(self) -> int: ... # E: Missing return statement + @y.setter + def y(self, value: int) -> None: ... + +class B: + @property + def x(self) -> int: pass # E: Missing return statement + @property + def y(self) -> int: pass # E: Missing return statement + @y.setter + def y(self, value: int) -> None: pass + +class C: + @property + def x(self) -> int: # E: Missing return statement + """Some property.""" + @property + def y(self) -> int: # E: Missing return statement + """Some property.""" + @y.setter + def y(self, value: int) -> None: pass +[builtins fixtures/property.pyi] + +[case testEmptyBodyNoteABCMeta] +# flags: --strict-optional +from abc import ABC + +class A(ABC): + def foo(self) -> int: # E: Missing return statement \ + # N: If the method is meant to be abstract, use @abc.abstractmethod + ... + +[case testEmptyBodyAllowedFunctionStub] +# flags: --strict-optional +import stub +[file stub.pyi] +from typing import overload, Union + +def func1(x: str) -> int: pass +def func2(x: str) -> int: ... +def func3(x: str) -> int: + """Some function.""" + +[case testEmptyBodyAllowedMethodNonAbstractStub] +# flags: --strict-optional +import stub +[file stub.pyi] +from typing import overload, Union + +class A: + def func1(self, x: str) -> int: pass + def func2(self, x: str) -> int: ... + def func3(self, x: str) -> int: + """Some function.""" + +class B: + @classmethod + def func1(cls, x: str) -> int: pass + @classmethod + def func2(cls, x: str) -> int: ... + @classmethod + def func3(cls, x: str) -> int: + """Some function.""" +[builtins fixtures/classmethod.pyi] + +[case testEmptyBodyAllowedPropertyNonAbstractStub] +# flags: --strict-optional +import stub +[file stub.pyi] +class A: + @property + def x(self) -> int: ... + @property + def y(self) -> int: ... + @y.setter + def y(self, value: int) -> None: ... 
+ +class B: + @property + def x(self) -> int: pass + @property + def y(self) -> int: pass + @y.setter + def y(self, value: int) -> None: pass + +class C: + @property + def x(self) -> int: + """Some property.""" + @property + def y(self) -> int: + """Some property.""" + @y.setter + def y(self, value: int) -> None: pass +[builtins fixtures/property.pyi] + +[case testEmptyBodyAllowedMethodAbstract] +# flags: --strict-optional +from typing import overload, Union +from abc import abstractmethod + +class A: + @abstractmethod + def func1(self, x: str) -> int: pass + @abstractmethod + def func2(self, x: str) -> int: ... + @abstractmethod + def func3(self, x: str) -> int: + """Some function.""" + +class B: + @classmethod + @abstractmethod + def func1(cls, x: str) -> int: pass + @classmethod + @abstractmethod + def func2(cls, x: str) -> int: ... + @classmethod + @abstractmethod + def func3(cls, x: str) -> int: + """Some function.""" + +class C: + @overload + @abstractmethod + def func4(self, x: int) -> int: ... + @overload + @abstractmethod + def func4(self, x: str) -> str: ... + @abstractmethod + def func4(self, x: Union[int, str]) -> Union[int, str]: + pass + + @overload + @abstractmethod + def func5(self, x: int) -> int: ... + @overload + @abstractmethod + def func5(self, x: str) -> str: ... + @abstractmethod + def func5(self, x: Union[int, str]) -> Union[int, str]: + """Some function.""" +[builtins fixtures/classmethod.pyi] + +[case testEmptyBodyAllowedPropertyAbstract] +# flags: --strict-optional +from abc import abstractmethod +class A: + @property + @abstractmethod + def x(self) -> int: ... + @property + @abstractmethod + def y(self) -> int: ... + @y.setter + @abstractmethod + def y(self, value: int) -> None: ... + +class B: + @property + @abstractmethod + def x(self) -> int: pass + @property + @abstractmethod + def y(self) -> int: pass + @y.setter + @abstractmethod + def y(self, value: int) -> None: pass + +class C: + @property + @abstractmethod + def x(self) -> int: + """Some property.""" + @property + @abstractmethod + def y(self) -> int: + """Some property.""" + @y.setter + @abstractmethod + def y(self, value: int) -> None: pass +[builtins fixtures/property.pyi] + +[case testEmptyBodyImplicitlyAbstractProtocol] +# flags: --strict-optional +from typing import Protocol, overload, Union + +class P1(Protocol): + def meth(self) -> int: ... +class B1(P1): ... +class C1(P1): + def meth(self) -> int: + return 0 +B1() # E: Cannot instantiate abstract class "B1" with abstract attribute "meth" +C1() + +class P2(Protocol): + @classmethod + def meth(cls) -> int: ... +class B2(P2): ... +class C2(P2): + @classmethod + def meth(cls) -> int: + return 0 +B2() # E: Cannot instantiate abstract class "B2" with abstract attribute "meth" +C2() + +class P3(Protocol): + @overload + def meth(self, x: int) -> int: ... + @overload + def meth(self, x: str) -> str: ... +class B3(P3): ... +class C3(P3): + @overload + def meth(self, x: int) -> int: ... + @overload + def meth(self, x: str) -> str: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return 0 +B3() # E: Cannot instantiate abstract class "B3" with abstract attribute "meth" +C3() +[builtins fixtures/classmethod.pyi] + +[case testEmptyBodyImplicitlyAbstractProtocolProperty] +# flags: --strict-optional +from typing import Protocol + +class P1(Protocol): + @property + def attr(self) -> int: ... +class B1(P1): ... 
+class C1(P1): + @property + def attr(self) -> int: + return 0 +B1() # E: Cannot instantiate abstract class "B1" with abstract attribute "attr" +C1() + +class P2(Protocol): + @property + def attr(self) -> int: ... + @attr.setter + def attr(self, value: int) -> None: ... +class B2(P2): ... +class C2(P2): + @property + def attr(self) -> int: return 0 + @attr.setter + def attr(self, value: int) -> None: pass +B2() # E: Cannot instantiate abstract class "B2" with abstract attribute "attr" +C2() +[builtins fixtures/property.pyi] + +[case testEmptyBodyImplicitlyAbstractProtocolStub] +# flags: --strict-optional +from stub import P1, P2, P3, P4 + +class B1(P1): ... +class B2(P2): ... +class B3(P3): ... +class B4(P4): ... + +B1() +B2() +B3() +B4() # E: Cannot instantiate abstract class "B4" with abstract attribute "meth" + +[file stub.pyi] +from typing import Protocol, overload, Union +from abc import abstractmethod + +class P1(Protocol): + def meth(self) -> int: ... + +class P2(Protocol): + @classmethod + def meth(cls) -> int: ... + +class P3(Protocol): + @overload + def meth(self, x: int) -> int: ... + @overload + def meth(self, x: str) -> str: ... + +class P4(Protocol): + @abstractmethod + def meth(self) -> int: ... +[builtins fixtures/classmethod.pyi] + +[case testEmptyBodyUnsafeAbstractSuper] +# flags: --strict-optional +from stub import StubProto, StubAbstract +from typing import Protocol +from abc import abstractmethod + +class Proto(Protocol): + def meth(self) -> int: ... +class ProtoDef(Protocol): + def meth(self) -> int: return 0 + +class Abstract: + @abstractmethod + def meth(self) -> int: ... +class AbstractDef: + @abstractmethod + def meth(self) -> int: return 0 + +class SubProto(Proto): + def meth(self) -> int: + return super().meth() # E: Call to abstract method "meth" of "Proto" with trivial body via super() is unsafe +class SubProtoDef(ProtoDef): + def meth(self) -> int: + return super().meth() + +class SubAbstract(Abstract): + def meth(self) -> int: + return super().meth() # E: Call to abstract method "meth" of "Abstract" with trivial body via super() is unsafe +class SubAbstractDef(AbstractDef): + def meth(self) -> int: + return super().meth() + +class SubStubProto(StubProto): + def meth(self) -> int: + return super().meth() +class SubStubAbstract(StubAbstract): + def meth(self) -> int: + return super().meth() + +[file stub.pyi] +from typing import Protocol +from abc import abstractmethod + +class StubProto(Protocol): + def meth(self) -> int: ... +class StubAbstract: + @abstractmethod + def meth(self) -> int: ... + +[case testEmptyBodyUnsafeAbstractSuperProperty] +# flags: --strict-optional +from stub import StubProto, StubAbstract +from typing import Protocol +from abc import abstractmethod + +class Proto(Protocol): + @property + def attr(self) -> int: ... +class SubProto(Proto): + @property + def attr(self) -> int: return super().attr # E: Call to abstract method "attr" of "Proto" with trivial body via super() is unsafe + +class ProtoDef(Protocol): + @property + def attr(self) -> int: return 0 +class SubProtoDef(ProtoDef): + @property + def attr(self) -> int: return super().attr + +class Abstract: + @property + @abstractmethod + def attr(self) -> int: ... 
+class SubAbstract(Abstract): + @property + @abstractmethod + def attr(self) -> int: return super().attr # E: Call to abstract method "attr" of "Abstract" with trivial body via super() is unsafe + +class AbstractDef: + @property + @abstractmethod + def attr(self) -> int: return 0 +class SubAbstractDef(AbstractDef): + @property + @abstractmethod + def attr(self) -> int: return super().attr + +class SubStubProto(StubProto): + @property + def attr(self) -> int: return super().attr +class SubStubAbstract(StubAbstract): + @property + def attr(self) -> int: return super().attr + +[file stub.pyi] +from typing import Protocol +from abc import abstractmethod + +class StubProto(Protocol): + @property + def attr(self) -> int: ... +class StubAbstract: + @property + @abstractmethod + def attr(self) -> int: ... +[builtins fixtures/property.pyi] + +[case testEmptyBodyUnsafeAbstractSuperOverloads] +# flags: --strict-optional +from stub import StubProto +from typing import Protocol, overload, Union + +class ProtoEmptyImpl(Protocol): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + raise NotImplementedError +class ProtoDefImpl(Protocol): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return 0 +class ProtoNoImpl(Protocol): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + +class SubProtoEmptyImpl(ProtoEmptyImpl): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return super().meth(0) # E: Call to abstract method "meth" of "ProtoEmptyImpl" with trivial body via super() is unsafe +class SubProtoDefImpl(ProtoDefImpl): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return super().meth(0) +class SubStubProto(StubProto): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return super().meth(0) + +# TODO: it would be good to also give an error in this case. +class SubProtoNoImpl(ProtoNoImpl): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + def meth(self, x: Union[int, str]) -> Union[int, str]: + return super().meth(0) + +[file stub.pyi] +from typing import Protocol, overload + +class StubProto(Protocol): + @overload + def meth(self, x: str) -> str: ... + @overload + def meth(self, x: int) -> int: ... + +[builtins fixtures/exception.pyi] + +[case testEmptyBodyNoSuperWarningWithoutStrict] +# flags: --no-strict-optional +from typing import Protocol +from abc import abstractmethod + +class Proto(Protocol): + def meth(self) -> int: ... +class Abstract: + @abstractmethod + def meth(self) -> int: ... 
+ +class SubProto(Proto): + def meth(self) -> int: + return super().meth() +class SubAbstract(Abstract): + def meth(self) -> int: + return super().meth() + +[case testEmptyBodyNoSuperWarningOptionalReturn] +# flags: --strict-optional +from typing import Protocol, Optional +from abc import abstractmethod + +class Proto(Protocol): + def meth(self) -> Optional[int]: pass +class Abstract: + @abstractmethod + def meth(self) -> Optional[int]: pass + +class SubProto(Proto): + def meth(self) -> Optional[int]: + return super().meth() +class SubAbstract(Abstract): + def meth(self) -> Optional[int]: + return super().meth() + +[case testEmptyBodyTypeCheckingOnly] +# flags: --strict-optional +from typing import TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def dynamic(self) -> int: ... # OK diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 195e70cf5880..40efe2d2cece 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -925,3 +925,33 @@ async def f() -> AsyncGenerator[int, None]: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testAwaitUnion] +from typing import overload, Union + +class A: ... +class B: ... + +@overload +async def foo(x: A) -> B: ... +@overload +async def foo(x: B) -> A: ... +async def foo(x): ... + +async def bar(x: Union[A, B]) -> None: + reveal_type(await foo(x)) # N: Revealed type is "Union[__main__.B, __main__.A]" + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + +[case testInvalidComprehensionNoCrash] +async def foo(x: int) -> int: ... + +crasher = [await foo(x) for x in [1, 2, 3]] # E: "await" outside function + +def bad() -> None: + y = [await foo(x) for x in [1, 2, 3]] # E: "await" outside coroutine ("async def") +async def good() -> None: + y = [await foo(x) for x in [1, 2, 3]] # OK +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-attr.test index c5b64ee61376..f6ef289e792e 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-attr.test @@ -1,4 +1,4 @@ -[case testAttrsSimple] +[case testAttrsSimple_no_empty] import attr @attr.s class A: @@ -151,9 +151,9 @@ class D: [case testAttrsNotBooleans] import attr x = True -@attr.s(cmp=x) # E: "cmp" argument must be True or False. +@attr.s(cmp=x) # E: "cmp" argument must be a True, False, or None literal class A: - a = attr.ib(init=x) # E: "init" argument must be True or False. 
+ a = attr.ib(init=x) # E: "init" argument must be a True or False literal [builtins fixtures/bool.pyi] [case testAttrsInitFalse] @@ -1428,7 +1428,7 @@ class B(A): reveal_type(B) # N: Revealed type is "def (foo: builtins.int) -> __main__.B" [builtins fixtures/bool.pyi] -[case testAttrsClassHasAttributeWithAttributes] +[case testAttrsClassHasMagicAttribute] import attr @attr.s @@ -1436,14 +1436,14 @@ class A: b: int = attr.ib() c: str = attr.ib() -reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[builtins.int], attr.Attribute[builtins.str], fallback=__main__.A._AttrsAttributes]" +reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[builtins.int], attr.Attribute[builtins.str], fallback=__main__.A.____main___A_AttrsAttributes__]" reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[builtins.int]" reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[builtins.int]" -A.__attrs_attrs__.x # E: "_AttrsAttributes" has no attribute "x" +A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" [builtins fixtures/attr.pyi] -[case testAttrsBareClassHasAttributeWithAttributes] +[case testAttrsBareClassHasMagicAttribute] import attr @attr.s @@ -1451,14 +1451,14 @@ class A: b = attr.ib() c = attr.ib() -reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[Any], attr.Attribute[Any], fallback=__main__.A._AttrsAttributes]" +reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[Any], attr.Attribute[Any], fallback=__main__.A.____main___A_AttrsAttributes__]" reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[Any]" reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[Any]" -A.__attrs_attrs__.x # E: "_AttrsAttributes" has no attribute "x" +A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" [builtins fixtures/attr.pyi] -[case testAttrsNGClassHasAttributeWithAttributes] +[case testAttrsNGClassHasMagicAttribute] import attr @attr.define @@ -1466,10 +1466,53 @@ class A: b: int c: str -reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[builtins.int], attr.Attribute[builtins.str], fallback=__main__.A._AttrsAttributes]" +reveal_type(A.__attrs_attrs__) # N: Revealed type is "Tuple[attr.Attribute[builtins.int], attr.Attribute[builtins.str], fallback=__main__.A.____main___A_AttrsAttributes__]" reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[builtins.int]" reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[builtins.int]" -A.__attrs_attrs__.x # E: "_AttrsAttributes" has no attribute "x" +A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" + +[builtins fixtures/attr.pyi] + +[case testAttrsMagicAttributeProtocol] +import attr +from typing import Any, Protocol, Type, ClassVar + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +@attr.define +class A: + b: int + c: str + +def takes_attrs_cls(cls: Type[AttrsInstance]) -> None: + pass + +def takes_attrs_instance(inst: AttrsInstance) -> None: + pass + +takes_attrs_cls(A) +takes_attrs_instance(A(1, "")) + +takes_attrs_cls(A(1, "")) # E: Argument 1 to "takes_attrs_cls" has incompatible type "A"; expected "Type[AttrsInstance]" +takes_attrs_instance(A) # E: Argument 1 to "takes_attrs_instance" has incompatible type "Type[A]"; expected "AttrsInstance" # N: ClassVar protocol member AttrsInstance.__attrs_attrs__ can never be matched by a class object +[builtins 
fixtures/attr.pyi] + +[case testAttrsInitMethodAlwaysGenerates] +from typing import Tuple +import attr + +@attr.define(init=False) +class A: + b: int + c: str + def __init__(self, bc: Tuple[int, str]) -> None: + b, c = bc + self.__attrs_init__(b, c) + +reveal_type(A) # N: Revealed type is "def (bc: Tuple[builtins.int, builtins.str]) -> __main__.A" +reveal_type(A.__init__) # N: Revealed type is "def (self: __main__.A, bc: Tuple[builtins.int, builtins.str])" +reveal_type(A.__attrs_init__) # N: Revealed type is "def (self: __main__.A, b: builtins.int, c: builtins.str)" [builtins fixtures/attr.pyi] @@ -1745,3 +1788,82 @@ class C: c = C(x=[C.D()]) reveal_type(c.x) # N: Revealed type is "builtins.list[__main__.C.D]" [builtins fixtures/list.pyi] + +[case testRedefinitionInFrozenClassNoCrash] +import attr + +@attr.s +class MyData: + is_foo: bool = attr.ib() + + @staticmethod # E: Name "is_foo" already defined on line 5 + def is_foo(string: str) -> bool: ... +[builtins fixtures/classmethod.pyi] + +[case testOverrideWithPropertyInFrozenClassNoCrash] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> str: ... +[builtins fixtures/property.pyi] + +[case testOverrideWithPropertyInFrozenClassChecked] +from attrs import frozen + +@frozen(kw_only=True) +class Base: + name: str + +@frozen(kw_only=True) +class Sub(Base): + first_name: str + last_name: str + + @property + def name(self) -> int: ... # E: Signature of "name" incompatible with supertype "Base" + +# This matches runtime semantics +reveal_type(Sub) # N: Revealed type is "def (*, name: builtins.str, first_name: builtins.str, last_name: builtins.str) -> __main__.Sub" +[builtins fixtures/property.pyi] + +[case testFinalInstanceAttribute] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +reveal_type(C) # N: Revealed type is "def (a: builtins.int) -> __main__.C" + +C(1).a = 2 # E: Cannot assign to final attribute "a" + +[builtins fixtures/property.pyi] + +[case testFinalInstanceAttributeInheritance] +from attrs import define +from typing import Final + +@define +class C: + a: Final[int] + +@define +class D(C): + b: Final[str] + +reveal_type(D) # N: Revealed type is "def (a: builtins.int, b: builtins.str) -> __main__.D" + +D(1, "").a = 2 # E: Cannot assign to final attribute "a" +D(1, "").b = "2" # E: Cannot assign to final attribute "b" + +[builtins fixtures/property.pyi] diff --git a/test-data/unit/check-basic.test b/test-data/unit/check-basic.test index 238aab3944ff..c16b9e40122d 100644 --- a/test-data/unit/check-basic.test +++ b/test-data/unit/check-basic.test @@ -12,25 +12,26 @@ class A: pass class B: pass [case testConstructionAndAssignment] -x = None # type: A -x = A() -if int(): - x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") class A: def __init__(self): pass class B: def __init__(self): pass +x = None # type: A +x = A() +if int(): + x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testInheritInitFromObject] +class A(object): pass +class B(object): pass x = None # type: A if int(): x = A() if int(): x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A(object): pass -class B(object): pass - [case testImplicitInheritInitFromObject] +class A: pass +class B: pass x = None # type: A o = None # type: object if int(): @@ -39,10 
+40,6 @@ if int(): x = A() if int(): o = x -class A: pass -class B: pass -[out] - [case testTooManyConstructorArgs] import typing object(object()) @@ -51,21 +48,15 @@ main:2: error: Too many arguments for "object" [case testVarDefWithInit] import typing -a = A() # type: A -b = object() # type: A class A: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") - +a = A() # type: A +b = object() # type: A # E: Incompatible types in assignment (expression has type "object", variable has type "A") [case testInheritanceBasedSubtyping] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") - +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testDeclaredVariableInParentheses] (x) = None # type: int @@ -101,32 +92,22 @@ w = 1 # E: Incompatible types in assignment (expression has type "int", variabl [case testFunction] import typing -def f(x: 'A') -> None: pass -f(A()) -f(B()) # Fail class A: pass class B: pass -[out] -main:4: error: Argument 1 to "f" has incompatible type "B"; expected "A" - +def f(x: 'A') -> None: pass +f(A()) +f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" [case testNotCallable] import typing -A()() class A: pass -[out] -main:2: error: "A" not callable - +A()() # E: "A" not callable [case testSubtypeArgument] import typing -def f(x: 'A', y: 'B') -> None: pass -f(B(), A()) # Fail -f(B(), B()) - class A: pass class B(A): pass -[out] -main:3: error: Argument 2 to "f" has incompatible type "A"; expected "B" - +def f(x: 'A', y: 'B') -> None: pass +f(B(), A()) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(B(), B()) [case testInvalidArgumentCount] import typing def f(x, y) -> None: pass @@ -194,12 +175,10 @@ main:4: error: Incompatible types in assignment (expression has type "B", variab [case testVariableInitializationWithSubtype] import typing -x = B() # type: A -y = A() # type: B # Fail class A: pass class B(A): pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +x = B() # type: A +y = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") -- Misc @@ -217,15 +196,11 @@ main:3: error: Incompatible return value type (got "B", expected "A") [case testTopLevelContextAndInvalidReturn] import typing -def f() -> 'A': - return B() -a = B() # type: A class A: pass class B: pass -[out] -main:3: error: Incompatible return value type (got "B", expected "A") -main:4: error: Incompatible types in assignment (expression has type "B", variable has type "A") - +def f() -> 'A': + return B() # E: Incompatible return value type (got "B", expected "A") +a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testEmptyReturnInAnyTypedFunction] from typing import Any def f() -> Any: @@ -252,6 +227,8 @@ reveal_type(__annotations__) # N: Revealed type is "builtins.dict[builtins.str, [case testLocalVariableShadowing] +class A: pass +class B: pass a = None # type: A if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -263,10 +240,6 @@ def f() -> None: a = B() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") a = A() - -class A: pass 
-class B: pass - [case testGlobalDefinedInBlockWithType] class A: pass while A: @@ -300,12 +273,15 @@ main:4: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "i main:5: error: Incompatible return value type (got "int", expected "str") main:6: error: Argument 1 to "f" of "A" has incompatible type "str"; expected "int" -[case testTrailingCommaParsing-skip] +[case testTrailingCommaParsing] x = 1 -x in 1, -if x in 1, : - pass +x in 1, # E: Unsupported right operand type for in ("int") +[builtins fixtures/tuple.pyi] + +[case testTrailingCommaInIfParsing] +if x in 1, : pass [out] +main:1: error: invalid syntax [case testInitReturnTypeError] class C: @@ -347,7 +323,8 @@ from typing import Union class A: ... class B: ... -x: Union[mock, A] # E: Module "mock" is not valid as a type +x: Union[mock, A] # E: Module "mock" is not valid as a type \ + # N: Perhaps you meant to use a protocol matching the module structure? if isinstance(x, B): pass @@ -363,7 +340,8 @@ from typing import overload, Any, Union @overload def f(x: int) -> int: ... @overload -def f(x: str) -> Union[mock, str]: ... # E: Module "mock" is not valid as a type +def f(x: str) -> Union[mock, str]: ... # E: Module "mock" is not valid as a type \ + # N: Perhaps you meant to use a protocol matching the module structure? def f(x): pass @@ -388,15 +366,6 @@ b = none.__bool__() reveal_type(b) # N: Revealed type is "Literal[False]" [builtins fixtures/bool.pyi] -[case testNoneHasBoolShowNoneErrorsFalse] -none = None -b = none.__bool__() -reveal_type(b) # N: Revealed type is "Literal[False]" -[builtins fixtures/bool.pyi] -[file mypy.ini] -\[mypy] -show_none_errors = False - [case testAssignmentInvariantNoteForList] from typing import List x: List[int] diff --git a/test-data/unit/check-bound.test b/test-data/unit/check-bound.test index bf13ef874579..eb97bde32e1f 100644 --- a/test-data/unit/check-bound.test +++ b/test-data/unit/check-bound.test @@ -215,3 +215,13 @@ if int(): b = 'a' # E: Incompatible types in assignment (expression has type "str", variable has type "int") twice(a) # E: Value of type variable "T" of "twice" cannot be "int" [builtins fixtures/args.pyi] + + +[case testIterableBoundUnpacking] +from typing import Tuple, TypeVar +TupleT = TypeVar("TupleT", bound=Tuple[int, ...]) +def f(t: TupleT) -> None: + a, *b = t + reveal_type(a) # N: Revealed type is "builtins.int" + reveal_type(b) # N: Revealed type is "builtins.list[builtins.int]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-class-namedtuple.test b/test-data/unit/check-class-namedtuple.test index ecc81f3cee33..8ae7f6555f9d 100644 --- a/test-data/unit/check-class-namedtuple.test +++ b/test-data/unit/check-class-namedtuple.test @@ -393,8 +393,6 @@ class X(typing.NamedTuple): [out] main:6: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" main:7: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" -main:7: error: Type cannot be declared in assignment to non-self attribute -main:7: error: "int" has no attribute "x" main:9: error: Non-default NamedTuple fields cannot follow default fields [builtins fixtures/list.pyi] @@ -585,8 +583,8 @@ class Base(NamedTuple): reveal_type(self[T]) # N: Revealed type is "builtins.int" \ # E: No overload variant of "__getitem__" of "tuple" matches argument type "object" \ # N: Possible overload variants: \ - # N: def __getitem__(self, int) -> int \ - # N: def __getitem__(self, slice) -> Tuple[int, ...] 
+ # N: def __getitem__(self, int, /) -> int \ + # N: def __getitem__(self, slice, /) -> Tuple[int, ...] return self.x def bad_override(self) -> int: return self.x diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index 8adf2e7ed5f1..d5fb830487e8 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -3,64 +3,56 @@ [case testMethodCall] +class A: + def foo(self, x: 'A') -> None: pass +class B: + def bar(self, x: 'B', y: A) -> None: pass a = None # type: A b = None # type: B -a.foo(B()) # Fail -a.bar(B(), A()) # Fail +a.foo(B()) # E: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" +a.bar(B(), A()) # E: "A" has no attribute "bar" a.foo(A()) b.bar(B(), A()) +[case testMethodCallWithSubtype] class A: def foo(self, x: 'A') -> None: pass -class B: - def bar(self, x: 'B', y: A) -> None: pass -[out] -main:5: error: Argument 1 to "foo" of "A" has incompatible type "B"; expected "A" -main:6: error: "A" has no attribute "bar" - -[case testMethodCallWithSubtype] + def bar(self, x: 'B') -> None: pass +class B(A): pass a = None # type: A a.foo(A()) a.foo(B()) -a.bar(A()) # Fail +a.bar(A()) # E: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" a.bar(B()) +[case testInheritingMethod] class A: - def foo(self, x: 'A') -> None: pass - def bar(self, x: 'B') -> None: pass + def foo(self, x: 'B') -> None: pass class B(A): pass -[out] -main:5: error: Argument 1 to "bar" of "A" has incompatible type "A"; expected "B" - -[case testInheritingMethod] a = None # type: B a.foo(A()) # Fail a.foo(B()) -class A: - def foo(self, x: 'B') -> None: pass -class B(A): pass -[targets __main__, __main__, __main__.A.foo] +[targets __main__, __main__.A.foo] [out] -main:3: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "A"; expected "B" [case testMethodCallWithInvalidNumberOfArguments] +class A: + def foo(self, x: 'A') -> None: pass a = None # type: A a.foo() # Fail a.foo(object(), A()) # Fail - -class A: - def foo(self, x: 'A') -> None: pass [out] -main:3: error: Missing positional argument "x" in call to "foo" of "A" -main:4: error: Too many arguments for "foo" of "A" -main:4: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" +main:5: error: Missing positional argument "x" in call to "foo" of "A" +main:6: error: Too many arguments for "foo" of "A" +main:6: error: Argument 1 to "foo" of "A" has incompatible type "object"; expected "A" [case testMethodBody] import typing @@ -216,13 +208,11 @@ main:11: error: "B" has no attribute "a" [case testExplicitAttributeInBody] -a = None # type: A -a.x = object() # Fail -a.x = A() class A: x = None # type: A -[out] -main:3: error: Incompatible types in assignment (expression has type "object", variable has type "A") +a = None # type: A +a.x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") +a.x = A() [case testAttributeDefinedInNonInitMethod] import typing @@ -341,6 +331,59 @@ main:7: note: This violates the Liskov substitution principle main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides main:9: error: Return type "object" of "h" incompatible with return type "A" in supertype "A" +[case testMethodOverridingWithIncompatibleTypesOnMultipleLines] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( + self, + x: int, + y: bool, + ) -> None: + pass +[out] 
+main:7: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" +main:7: note: This violates the Liskov substitution principle +main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + +[case testMultiLineMethodOverridingWithIncompatibleTypesIgnorableAtArgument] +class A: + def f(self, x: int, y: str) -> None: pass + +class B(A): + def f( + self, + x: int, + y: bool, # type: ignore[override] + ) -> None: + pass + +[case testMultiLineMethodOverridingWithIncompatibleTypesIgnorableAtDefinition] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( # type: ignore[override] + self, + x: int, + y: bool, + ) -> None: + pass + +[case testMultiLineMethodOverridingWithIncompatibleTypesWrongIgnore] +class A: + def f(self, x: int, y: str) -> None: pass +class B(A): + def f( # type: ignore[return-type] + self, + x: int, + y: bool, + ) -> None: + pass +[out] +main:7: error: Argument 2 of "f" is incompatible with supertype "A"; supertype defines the argument type as "str" +main:7: note: This violates the Liskov substitution principle +main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + [case testEqMethodsOverridingWithNonObjects] class A: def __eq__(self, other: A) -> bool: pass # Fail @@ -629,64 +672,50 @@ class B(A): [case testTrivialConstructor] -import typing -a = A() # type: A -b = A() # type: B # Fail class A: def __init__(self) -> None: pass -class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +a = A() # type: A +b = A() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +class B: pass [case testConstructor] -import typing -a = A(B()) # type: A -aa = A(object()) # type: A # Fail -b = A(B()) # type: B # Fail class A: def __init__(self, x: 'B') -> None: pass class B: pass -[out] -main:3: error: Argument 1 to "A" has incompatible type "object"; expected "B" -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -[case testConstructorWithTwoArguments] -import typing -a = A(C(), B()) # type: A # Fail +a = A(B()) # type: A +aa = A(object()) # type: A # E: Argument 1 to "A" has incompatible type "object"; expected "B" +b = A(B()) # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") +[case testConstructorWithTwoArguments] class A: def __init__(self, x: 'B', y: 'C') -> None: pass class B: pass class C(B): pass -[out] -main:2: error: Argument 2 to "A" has incompatible type "B"; expected "C" + +a = A(C(), B()) # type: A # E: Argument 2 to "A" has incompatible type "B"; expected "C" [case testInheritedConstructor] -import typing -b = B(C()) # type: B -a = B(D()) # type: A # Fail -class A: - def __init__(self, x: 'C') -> None: pass class B(A): pass class C: pass class D: pass -[out] -main:3: error: Argument 1 to "B" has incompatible type "D"; expected "C" + +b = B(C()) # type: B +a = B(D()) # type: A # E: Argument 1 to "B" has incompatible type "D"; expected "C" +class A: + def __init__(self, x: 'C') -> None: pass [case testOverridingWithIncompatibleConstructor] -import typing -A() # Fail -B(C()) # Fail -A(C()) -B() class A: def __init__(self, x: 'C') -> None: pass class B(A): def __init__(self) -> None: pass class C: pass -[out] -main:2: error: Missing positional argument "x" in call to "A" -main:3: error: Too many arguments for "B" + +A() # E: Missing 
positional argument "x" in call to "A" +B(C()) # E: Too many arguments for "B" +A(C()) +B() [case testConstructorWithReturnValueType] import typing @@ -826,15 +855,12 @@ class Foo: pass [case testGlobalFunctionInitWithReturnType] -import typing -a = __init__() # type: A -b = __init__() # type: B # Fail -def __init__() -> 'A': pass class A: pass class B: pass -[out] -main:3: error: Incompatible types in assignment (expression has type "A", variable has type "B") +def __init__() -> 'A': pass +a = __init__() # type: A +b = __init__() # type: B # E: Incompatible types in assignment (expression has type "A", variable has type "B") [case testAccessingInit] from typing import Any, cast class A: @@ -844,7 +870,12 @@ a.__init__(a) # E: Accessing "__init__" on an instance is unsound, since instan (cast(Any, a)).__init__(a) [case testDeepInheritanceHierarchy] -import typing +class A: pass +class B(A): pass +class C(B): pass +class D(C): pass +class D2(C): pass + d = C() # type: D # E: Incompatible types in assignment (expression has type "C", variable has type "D") if int(): d = B() # E: Incompatible types in assignment (expression has type "B", variable has type "D") @@ -859,12 +890,23 @@ b = D() # type: B if int(): b = D2() -class A: pass -class B(A): pass -class C(B): pass -class D(C): pass -class D2(C): pass +[case testConstructorJoinsWithCustomMetaclass] +# flags: --strict-optional +from typing import TypeVar +import abc +def func() -> None: pass +class NormalClass: pass +class WithMetaclass(metaclass=abc.ABCMeta): pass + +T = TypeVar('T') +def join(x: T, y: T) -> T: pass + +f1 = join(func, WithMetaclass) +reveal_type(f1()) # N: Revealed type is "Union[__main__.WithMetaclass, None]" + +f2 = join(WithMetaclass, func) +reveal_type(f2()) # N: Revealed type is "Union[__main__.WithMetaclass, None]" -- Attribute access in class body -- ------------------------------ @@ -1013,7 +1055,7 @@ A.B = None # E: Cannot assign to a type [targets __main__] [case testAccessingClassAttributeWithTypeInferenceIssue] -x = C.x # E: Cannot determine type of "x" +x = C.x # E: Cannot determine type of "x" # E: Name "C" is used before definition def f() -> int: return 1 class C: x = f() @@ -1060,6 +1102,35 @@ def f() -> None: a.g(a) # E: Too many arguments for "g" of "A" [targets __main__, __main__.f] +[case testGenericClassWithinFunction] +from typing import TypeVar + +def test() -> None: + T = TypeVar('T', bound='Foo') + class Foo: + def returns_int(self) -> int: + return 0 + + def bar(self, foo: T) -> T: + x: T = foo + reveal_type(x) # N: Revealed type is "T`-1" + reveal_type(x.returns_int()) # N: Revealed type is "builtins.int" + return foo + reveal_type(Foo.bar) # N: Revealed type is "def [T <: __main__.Foo@5] (self: __main__.Foo@5, foo: T`-1) -> T`-1" + +[case testGenericClassWithInvalidTypevarUseWithinFunction] +from typing import TypeVar + +def test() -> None: + T = TypeVar('T', bound='Foo') + class Foo: + invalid: T # E: Type variable "T" is unbound \ + # N: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) \ + # N: (Hint: Use "T" in function signature to bind "T" inside a function) + + def bar(self, foo: T) -> T: + pass + [case testConstructNestedClass] import typing class A: @@ -1143,7 +1214,7 @@ reveal_type(Foo().Meta.name) # N: Revealed type is "builtins.str" class A: def __init__(self): - self.x = None # type: int + self.x = None # type: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs a = None # type: A a.x = 1 a.x = '' # 
E: Incompatible types in assignment (expression has type "str", variable has type "int") @@ -1155,7 +1226,7 @@ a.x = 1 a.x = '' # E: Incompatible types in assignment (expression has type "str", variable has type "int") class A: def __init__(self): - self.x = None # type: int + self.x = None # type: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs -- Special cases @@ -1163,13 +1234,9 @@ class A: [case testMultipleClassDefinition] -import typing -A() -class A: pass class A: pass -[out] -main:4: error: Name "A" already defined on line 3 - +class A: pass # E: Name "A" already defined on line 1 +A() [case testDocstringInClass] import typing class A: @@ -1897,12 +1964,12 @@ class B(A): [out] tmp/foo.pyi:5: error: Signature of "__add__" incompatible with supertype "A" tmp/foo.pyi:5: note: Superclass: -tmp/foo.pyi:5: note: def __add__(self, int) -> int +tmp/foo.pyi:5: note: def __add__(self, int, /) -> int tmp/foo.pyi:5: note: Subclass: tmp/foo.pyi:5: note: @overload -tmp/foo.pyi:5: note: def __add__(self, int) -> int +tmp/foo.pyi:5: note: def __add__(self, int, /) -> int tmp/foo.pyi:5: note: @overload -tmp/foo.pyi:5: note: def __add__(self, str) -> str +tmp/foo.pyi:5: note: def __add__(self, str, /) -> str tmp/foo.pyi:5: note: Overloaded operator methods cannot have wider argument types in overrides [case testOperatorMethodOverrideWideningArgumentType] @@ -2012,16 +2079,16 @@ class B(A): tmp/foo.pyi:8: error: Signature of "__add__" incompatible with supertype "A" tmp/foo.pyi:8: note: Superclass: tmp/foo.pyi:8: note: @overload -tmp/foo.pyi:8: note: def __add__(self, int) -> A +tmp/foo.pyi:8: note: def __add__(self, int, /) -> A tmp/foo.pyi:8: note: @overload -tmp/foo.pyi:8: note: def __add__(self, str) -> A +tmp/foo.pyi:8: note: def __add__(self, str, /) -> A tmp/foo.pyi:8: note: Subclass: tmp/foo.pyi:8: note: @overload -tmp/foo.pyi:8: note: def __add__(self, int) -> A +tmp/foo.pyi:8: note: def __add__(self, int, /) -> A tmp/foo.pyi:8: note: @overload -tmp/foo.pyi:8: note: def __add__(self, str) -> A +tmp/foo.pyi:8: note: def __add__(self, str, /) -> A tmp/foo.pyi:8: note: @overload -tmp/foo.pyi:8: note: def __add__(self, type) -> A +tmp/foo.pyi:8: note: def __add__(self, type, /) -> A tmp/foo.pyi:8: note: Overloaded operator methods cannot have wider argument types in overrides [case testOverloadedOperatorMethodOverrideWithSwitchedItemOrder] @@ -2305,7 +2372,7 @@ reveal_type(Fraction() + Fraction()) # N: Revealed type is "builtins.str" [case testReverseOperatorTypeVar2b] from typing import TypeVar -T = TypeVar("T", Real, Fraction) +T = TypeVar("T", "Real", "Fraction") class Real: def __add__(self, other: Fraction) -> str: ... class Fraction(Real): @@ -2348,9 +2415,9 @@ a: Union[int, float] b: int c: float -reveal_type(a + a) # N: Revealed type is "builtins.float" -reveal_type(a + b) # N: Revealed type is "builtins.float" -reveal_type(b + a) # N: Revealed type is "builtins.float" +reveal_type(a + a) # N: Revealed type is "Union[builtins.int, builtins.float]" +reveal_type(a + b) # N: Revealed type is "Union[builtins.int, builtins.float]" +reveal_type(b + a) # N: Revealed type is "Union[builtins.int, builtins.float]" reveal_type(a + c) # N: Revealed type is "builtins.float" reveal_type(c + a) # N: Revealed type is "builtins.float" [builtins fixtures/ops.pyi] @@ -2489,8 +2556,8 @@ def sum(x: Iterable[T]) -> Union[T, int]: ... def len(x: Iterable[T]) -> int: ... 
x = [1.1, 2.2, 3.3] -reveal_type(sum(x)) # N: Revealed type is "builtins.float" -reveal_type(sum(x) / len(x)) # N: Revealed type is "builtins.float" +reveal_type(sum(x)) # N: Revealed type is "Union[builtins.float, builtins.int]" +reveal_type(sum(x) / len(x)) # N: Revealed type is "Union[builtins.float, builtins.int]" [builtins fixtures/floatdict.pyi] [case testOperatorWithEmptyListAndSum] @@ -2612,10 +2679,10 @@ class D(A): def __iadd__(self, x: 'A') -> 'B': pass [out] main:6: error: Return type "A" of "__iadd__" incompatible with return type "B" in "__add__" of supertype "A" +main:8: error: Signatures of "__iadd__" and "__add__" are incompatible main:8: error: Argument 1 of "__iadd__" is incompatible with "__add__" of supertype "A"; supertype defines the argument type as "A" main:8: note: This violates the Liskov substitution principle main:8: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides -main:8: error: Signatures of "__iadd__" and "__add__" are incompatible [case testGetattribute] @@ -2855,7 +2922,7 @@ b.bad = 'a' # E: Incompatible types in assignment (expression has type "str", v from typing import Any class Test: - def __setattr__() -> None: ... # E: Method must have at least one argument # E: Invalid signature "Callable[[], None]" for "__setattr__" + def __setattr__() -> None: ... # E: Method must have at least one argument. Did you forget the "self" argument? # E: Invalid signature "Callable[[], None]" for "__setattr__" t = Test() t.crash = 'test' # E: "Test" has no attribute "crash" @@ -2909,7 +2976,11 @@ c.__setattr__("x", 42, p=True) [case testCallableObject] -import typing +class A: + def __call__(self, x: 'A') -> 'A': + pass +class B: pass + a = A() b = B() @@ -2922,19 +2993,15 @@ if int(): if int(): b = a(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") -class A: - def __call__(self, x: A) -> A: - pass -class B: pass - -- __new__ -- -------- [case testConstructInstanceWith__new__] +from typing import Optional class C: - def __new__(cls, foo: int = None) -> 'C': + def __new__(cls, foo: Optional[int] = None) -> 'C': obj = object.__new__(cls) return obj @@ -3232,7 +3299,8 @@ def error(u_c: Type[U]) -> P: # Error here, see below return new_pro(u_c) # Error here, see below [out] main:11: note: Revealed type is "__main__.WizUser" -main:12: error: A function returning TypeVar should receive at least one argument containing the same Typevar +main:12: error: A function returning TypeVar should receive at least one argument containing the same TypeVar +main:12: note: Consider using the upper bound "ProUser" instead main:13: error: Value of type variable "P" of "new_pro" cannot be "U" main:13: error: Incompatible return value type (got "U", expected "P") @@ -3824,28 +3892,59 @@ class Super: def foo(self, a: C) -> C: pass class Sub(Super): - @overload # Fail + @overload def foo(self, a: A) -> A: pass @overload def foo(self, a: B) -> C: pass # Fail @overload def foo(self, a: C) -> C: pass + +class Sub2(Super): + @overload + def foo(self, a: B) -> C: pass # Fail + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass + +class Sub3(Super): + @overload + def foo(self, a: A) -> int: pass + @overload + def foo(self, a: A) -> A: pass + @overload + def foo(self, a: C) -> C: pass [builtins fixtures/classmethod.pyi] [out] -tmp/foo.pyi:16: error: Signature of "foo" incompatible with supertype "Super" -tmp/foo.pyi:16: note: Superclass: -tmp/foo.pyi:16: note: @overload 
-tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C -tmp/foo.pyi:16: note: Subclass: -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: A) -> A -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: B) -> C -tmp/foo.pyi:16: note: @overload -tmp/foo.pyi:16: note: def foo(self, a: C) -> C tmp/foo.pyi:19: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader +tmp/foo.pyi:24: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:24: note: Superclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:24: note: Subclass: +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: B) -> C +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: A) -> A +tmp/foo.pyi:24: note: @overload +tmp/foo.pyi:24: note: def foo(self, a: C) -> C +tmp/foo.pyi:25: error: Overloaded function signatures 1 and 2 overlap with incompatible return types +tmp/foo.pyi:32: error: Signature of "foo" incompatible with supertype "Super" +tmp/foo.pyi:32: note: Superclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:32: note: Subclass: +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> int +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: A) -> A +tmp/foo.pyi:32: note: @overload +tmp/foo.pyi:32: note: def foo(self, a: C) -> C +tmp/foo.pyi:35: error: Overloaded function signature 2 will never be matched: signature 1's parameter type(s) are the same or broader [case testTypeTypeOverlapsWithObjectAndType] from foo import * @@ -4317,7 +4416,7 @@ class C(B): x = object() [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") -main:6: error: Incompatible types in assignment (expression has type "object", base class "B" defined the type as "str") +main:6: error: Incompatible types in assignment (expression has type "object", base class "A" defined the type as "int") [case testClassOneErrorPerLine] class A: @@ -4327,7 +4426,7 @@ class B(A): x = 1.0 [out] main:4: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") -main:5: error: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") +main:5: error: Incompatible types in assignment (expression has type "float", base class "A" defined the type as "int") [case testClassIgnoreType_RedefinedAttributeAndGrandparentAttributeTypesNotIgnored] class A: @@ -4335,7 +4434,7 @@ class A: class B(A): x = '' # type: ignore class C(B): - x = '' + x = '' # E: Incompatible types in assignment (expression has type "str", base class "A" defined the type as "int") [out] [case testClassIgnoreType_RedefinedAttributeTypeIgnoredInChildren] @@ -4351,7 +4450,7 @@ class C(B): class X(type): pass class Y(type): pass class A(metaclass=X): pass -class B(A, metaclass=Y): pass # E: Inconsistent metaclass structure for "B" +class B(A, metaclass=Y): pass # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [case testMetaclassNoTypeReveal] class M: 
@@ -4392,6 +4491,39 @@ def f(TB: Type[B]): reveal_type(TB) # N: Revealed type is "Type[__main__.B]" reveal_type(TB.x) # N: Revealed type is "builtins.int" +[case testMetaclassAsAny] +from typing import Any, ClassVar, Type + +MyAny: Any +class WithMeta(metaclass=MyAny): + x: ClassVar[int] + +reveal_type(WithMeta.a) # N: Revealed type is "Any" +reveal_type(WithMeta.m) # N: Revealed type is "Any" +reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" +reveal_type(WithMeta().x) # N: Revealed type is "builtins.int" +WithMeta().m # E: "WithMeta" has no attribute "m" +WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK + +[case testMetaclassAsAnyWithAFlag] +# flags: --disallow-subclassing-any +from typing import Any, ClassVar, Type + +MyAny: Any +class WithMeta(metaclass=MyAny): # E: Class cannot use "MyAny" as a metaclass (has type "Any") + x: ClassVar[int] + +reveal_type(WithMeta.a) # N: Revealed type is "Any" +reveal_type(WithMeta.m) # N: Revealed type is "Any" +reveal_type(WithMeta.x) # N: Revealed type is "builtins.int" +reveal_type(WithMeta().x) # N: Revealed type is "builtins.int" +WithMeta().m # E: "WithMeta" has no attribute "m" +WithMeta().a # E: "WithMeta" has no attribute "a" +t: Type[WithMeta] +t.unknown # OK + [case testMetaclassIterable] from typing import Iterable, Iterator @@ -4476,15 +4608,7 @@ from missing import M class A(metaclass=M): y = 0 reveal_type(A.y) # N: Revealed type is "builtins.int" -A.x # E: "Type[A]" has no attribute "x" - -[case testAnyMetaclass] -from typing import Any -M = None # type: Any -class A(metaclass=M): - y = 0 -reveal_type(A.y) # N: Revealed type is "builtins.int" -A.x # E: "Type[A]" has no attribute "x" +reveal_type(A.x) # N: Revealed type is "Any" [case testValidTypeAliasAsMetaclass] from typing_extensions import TypeAlias @@ -4710,7 +4834,7 @@ class A(Tuple[int, str]): pass -- ----------------------- [case testCrashOnSelfRecursiveNamedTupleVar] - +# flags: --disable-recursive-aliases from typing import NamedTuple N = NamedTuple('N', [('x', N)]) # E: Cannot resolve name "N" (possible cyclic definition) @@ -4740,7 +4864,7 @@ lst = [n, m] [builtins fixtures/isinstancelist.pyi] [case testCorrectJoinOfSelfRecursiveTypedDicts] - +# flags: --disable-recursive-aliases from mypy_extensions import TypedDict class N(TypedDict): @@ -4833,7 +4957,7 @@ reveal_type(x.frob) # N: Revealed type is "def (foos: builtins.dict[Any, __main_ [case testNewTypeFromForwardNamedTuple] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -4847,7 +4971,7 @@ x = NT(N(1)) from typing import NewType, Tuple from mypy_extensions import TypedDict -NT = NewType('NT', N) # E: Argument 2 to NewType(...) must be subclassable (got "N") +NT = NewType('NT', 'N') # E: Argument 2 to NewType(...) 
must be subclassable (got "N") class N(TypedDict): x: int [builtins fixtures/dict.pyi] @@ -4960,7 +5084,7 @@ def foo(node: Node) -> Node: [case testForwardReferencesInNewTypeMRORecomputed] from typing import NewType x: Foo -Foo = NewType('Foo', B) +Foo = NewType('Foo', 'B') class A: x: int class B(A): @@ -5213,8 +5337,8 @@ class CD(six.with_metaclass(M)): pass # E: Multiple metaclass definitions class M1(type): pass class Q1(metaclass=M1): pass @six.add_metaclass(M) -class CQA(Q1): pass # E: Inconsistent metaclass structure for "CQA" -class CQW(six.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for "CQW" +class CQA(Q1): pass # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases +class CQW(six.with_metaclass(M, Q1)): pass # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [builtins fixtures/tuple.pyi] [case testSixMetaclassAny] @@ -5226,6 +5350,19 @@ class F(six.with_metaclass(t.M)): pass class G: pass [builtins fixtures/tuple.pyi] +[case testSixMetaclassGenericBase] +import six +import abc +from typing import TypeVar, Generic + +T = TypeVar("T") + +class C(six.with_metaclass(abc.ABCMeta, Generic[T])): + pass +class D(six.with_metaclass(abc.ABCMeta, C[T])): + pass +[builtins fixtures/tuple.pyi] + -- Special support for future.utils -- -------------------------------- @@ -5319,7 +5456,7 @@ class C5(future.utils.with_metaclass(f())): pass # E: Dynamic metaclass not sup class M1(type): pass class Q1(metaclass=M1): pass -class CQW(future.utils.with_metaclass(M, Q1)): pass # E: Inconsistent metaclass structure for "CQW" +class CQW(future.utils.with_metaclass(M, Q1)): pass # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [builtins fixtures/tuple.pyi] [case testFutureMetaclassAny] @@ -5345,7 +5482,7 @@ class F: [case testCorrectEnclosingClassPushedInDeferred2] from typing import TypeVar -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') class C: def m(self: T) -> T: class Inner: @@ -5497,6 +5634,13 @@ class E(Protocol): # OK, is a protocol class F(E, Protocol): # OK, is a protocol pass +# Custom metaclass subclassing `ABCMeta`, see #13561 +class CustomMeta(ABCMeta): + pass + +class G(A, metaclass=CustomMeta): # Ok, has CustomMeta as a metaclass + pass + [file b.py] # All of these are OK because this is not a stub file. from abc import ABCMeta, abstractmethod @@ -5525,6 +5669,12 @@ class E(Protocol): class F(E, Protocol): pass +class CustomMeta(ABCMeta): + pass + +class G(A, metaclass=CustomMeta): + pass + [case testClassMethodOverride] from typing import Callable, Any @@ -6663,6 +6813,81 @@ class MyMetaClass(type): class MyClass(metaclass=MyMetaClass): pass + +[case testMetaclassPlaceholderNode] +from sympy.assumptions import ManagedProperties +from sympy.ops import AssocOp +reveal_type(AssocOp.x) # N: Revealed type is "sympy.basic.Basic" +reveal_type(AssocOp.y) # N: Revealed type is "builtins.int" + +[file sympy/__init__.py] + +[file sympy/assumptions.py] +from .basic import Basic +class ManagedProperties(type): + x: Basic + y: int +# The problem is with the next line, +# it creates the following order (classname, metaclass): +# 1. Basic NameExpr(ManagedProperties) +# 2. AssocOp None +# 3. ManagedProperties None +# 4. 
Basic NameExpr(ManagedProperties [sympy.assumptions.ManagedProperties]) +# So, `AssocOp` will still have `metaclass_type` as `None` +# and all its `mro` types will have `declared_metaclass` as `None`. +from sympy.ops import AssocOp + +[file sympy/basic.py] +from .assumptions import ManagedProperties +class Basic(metaclass=ManagedProperties): ... + +[file sympy/ops.py] +from sympy.basic import Basic +class AssocOp(Basic): ... + +[case testMetaclassSubclassSelf] +# This does not make much sense, but we must not crash: +import a +[file m.py] +from a import A # E: Module "a" has no attribute "A" +class Meta(A): pass +[file a.py] +from m import Meta +class A(metaclass=Meta): pass + +[case testMetaclassConflict] +class MyMeta1(type): ... +class MyMeta2(type): ... +class MyMeta3(type): ... +class A(metaclass=MyMeta1): ... +class B(metaclass=MyMeta2): ... +class C(metaclass=type): ... +class A1(A): ... +class E: ... + +class CorrectMeta(MyMeta1, MyMeta2): ... +class CorrectSubclass1(A1, B, E, metaclass=CorrectMeta): ... +class CorrectSubclass2(A, B, E, metaclass=CorrectMeta): ... +class CorrectSubclass3(B, A, metaclass=CorrectMeta): ... + +class ChildOfCorrectSubclass1(CorrectSubclass1): ... + +class CorrectWithType1(C, A1): ... +class CorrectWithType2(B, C): ... + +class Conflict1(A1, B, E): ... # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases +class Conflict2(A, B): ... # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases +class Conflict3(B, A): ... # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases + +class ChildOfConflict1(Conflict3): ... # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases +class ChildOfConflict2(Conflict3, metaclass=CorrectMeta): ... + +class ConflictingMeta(MyMeta1, MyMeta3): ... +class Conflict4(A1, B, E, metaclass=ConflictingMeta): ... # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases + +class ChildOfCorrectButWrongMeta(CorrectSubclass1, metaclass=ConflictingMeta): # E: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases + ... + [case testGenericOverride] from typing import Generic, TypeVar, Any @@ -6871,7 +7096,7 @@ reveal_type(C.__new__) # N: Revealed type is "def (cls: Type[__main__.C]) -> An [case testOverrideGenericSelfClassMethod] from typing import Generic, TypeVar, Type, List -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: @classmethod @@ -6932,7 +7157,7 @@ reveal_type(Foo().y) # N: Revealed type is "builtins.list[Any]" # flags: --check-untyped-defs class Foo: - def bad(): # E: Method must have at least one argument + def bad(): # E: Method must have at least one argument. Did you forget the "self" argument? self.x = 0 # E: Name "self" is not defined [case testTypeAfterAttributeAccessWithDisallowAnyExpr] @@ -7021,7 +7246,7 @@ from typing import Callable class C: x: Callable[[C], int] = lambda x: x.y.g() # E: "C" has no attribute "y" -[case testOpWithInheritedFromAny] +[case testOpWithInheritedFromAny-xfail] from typing import Any C: Any class D(C): @@ -7257,8 +7482,7 @@ class Foo: def meth1(self, a: str) -> str: ... # E: Name "meth1" already defined on line 5 def meth2(self, a: str) -> str: ... 
- from mod1 import meth2 # E: Unsupported class scoped import \ - # E: Name "meth2" already defined on line 8 + from mod1 import meth2 # E: Incompatible import of "meth2" (imported name has type "Callable[[int], int]", local name has type "Callable[[Foo, str], str]") class Bar: from mod1 import foo # E: Unsupported class scoped import @@ -7296,6 +7520,74 @@ def meth1(self: Any, y: str) -> str: ... T = TypeVar("T") def meth2(self: Any, y: T) -> T: ... +[case testNewAndInitNoReturn] +from typing import NoReturn + +class A: + def __new__(cls) -> NoReturn: ... + +class B: + def __init__(self) -> NoReturn: ... + +class C: + def __new__(cls) -> "C": ... + def __init__(self) -> NoReturn: ... + +class D: + def __new__(cls) -> NoReturn: ... + def __init__(self) -> NoReturn: ... + +reveal_type(A()) # N: Revealed type is "" +reveal_type(B()) # N: Revealed type is "" +reveal_type(C()) # N: Revealed type is "" +reveal_type(D()) # N: Revealed type is "" + +[case testOverloadedNewAndInitNoReturn] +from typing import NoReturn, overload + +class A: + @overload + def __new__(cls) -> NoReturn: ... + @overload + def __new__(cls, a: int) -> "A": ... + def __new__(cls, a: int = ...) -> "A": ... + +class B: + @overload + def __init__(self) -> NoReturn: ... + @overload + def __init__(self, a: int) -> None: ... + def __init__(self, a: int = ...) -> None: ... + +class C: + def __new__(cls, a: int = ...) -> "C": ... + @overload + def __init__(self) -> NoReturn: ... + @overload + def __init__(self, a: int) -> None: ... + def __init__(self, a: int = ...) -> None: ... + +class D: + @overload + def __new__(cls) -> NoReturn: ... + @overload + def __new__(cls, a: int) -> "D": ... + def __new__(cls, a: int = ...) -> "D": ... + @overload + def __init__(self) -> NoReturn: ... + @overload + def __init__(self, a: int) -> None: ... + def __init__(self, a: int = ...) -> None: ... + +reveal_type(A()) # N: Revealed type is "" +reveal_type(A(1)) # N: Revealed type is "__main__.A" +reveal_type(B()) # N: Revealed type is "" +reveal_type(B(1)) # N: Revealed type is "__main__.B" +reveal_type(C()) # N: Revealed type is "" +reveal_type(C(1)) # N: Revealed type is "__main__.C" +reveal_type(D()) # N: Revealed type is "" +reveal_type(D(1)) # N: Revealed type is "__main__.D" + [case testClassScopeImportWithWrapperAndError] class Foo: from mod import foo # E: Unsupported class scoped import @@ -7366,3 +7658,76 @@ class D(C[List[T]]): ... di: D[int] reveal_type(di) # N: Revealed type is "Tuple[builtins.list[builtins.int], builtins.list[builtins.int], fallback=__main__.D[builtins.int]]" [builtins fixtures/tuple.pyi] + +[case testOverrideAttrWithSettableProperty] +class Foo: + def __init__(self) -> None: + self.x = 42 + +class Bar(Foo): + @property + def x(self) -> int: ... + @x.setter + def x(self, value: int) -> None: ... +[builtins fixtures/property.pyi] + +[case testOverrideAttrWithSettablePropertyAnnotation] +class Foo: + x: int + +class Bar(Foo): + @property + def x(self) -> int: ... + @x.setter + def x(self, value: int) -> None: ... +[builtins fixtures/property.pyi] + +[case testOverrideMethodProperty] +class B: + def foo(self) -> int: + ... +class C(B): + @property + def foo(self) -> int: # E: Signature of "foo" incompatible with supertype "B" + ... +[builtins fixtures/property.pyi] + +[case testOverridePropertyMethod] +class B: + @property + def foo(self) -> int: + ... +class C(B): + def foo(self) -> int: # E: Signature of "foo" incompatible with supertype "B" + ... 
+[builtins fixtures/property.pyi] + +[case testAllowArgumentAsBaseClass] +from typing import Any, Type + +def e(b) -> None: + class D(b): ... + +def f(b: Any) -> None: + class D(b): ... + +def g(b: Type[Any]) -> None: + class D(b): ... + +def h(b: type) -> None: + class D(b): ... + +[case testNoCrashOnSelfWithForwardRefGenericClass] +from typing import Generic, Sequence, TypeVar, Self + +_T = TypeVar('_T', bound="Foo") + +class Foo: + foo: int + +class Element(Generic[_T]): + elements: Sequence[Self] + +class Bar(Foo): ... +e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" diff --git a/test-data/unit/check-columns.test b/test-data/unit/check-columns.test index 7c4681c7a709..9691e6565689 100644 --- a/test-data/unit/check-columns.test +++ b/test-data/unit/check-columns.test @@ -211,7 +211,7 @@ y: Dict[int, int] = { [builtins fixtures/dict.pyi] [case testColumnCannotDetermineType] -(x) # E:2: Cannot determine type of "x" +(x) # E:2: Cannot determine type of "x" # E:2: Name "x" is used before definition x = None [case testColumnInvalidIndexing] @@ -238,9 +238,9 @@ if int(): class A: def f(self, x: int) -> None: pass class B(A): - def f(self, x: str) -> None: pass # E:5: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" \ - # N:5: This violates the Liskov substitution principle \ - # N:5: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides + def f(self, x: str) -> None: pass # E:17: Argument 1 of "f" is incompatible with supertype "A"; supertype defines the argument type as "int" \ + # N:17: This violates the Liskov substitution principle \ + # N:17: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides class C(A): def f(self, x: int) -> int: pass # E:5: Return type "int" of "f" incompatible with return type "None" in supertype "A" class D(A): diff --git a/test-data/unit/check-ctypes.test b/test-data/unit/check-ctypes.test index 605c54fb5694..beb1afd779c0 100644 --- a/test-data/unit/check-ctypes.test +++ b/test-data/unit/check-ctypes.test @@ -15,11 +15,12 @@ a[1] = ctypes.c_int(42) a[2] = MyCInt(42) a[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ - # N: def __setitem__(self, int, Union[c_int, int]) -> None \ - # N: def __setitem__(self, slice, List[Union[c_int, int]]) -> None + # N: def __setitem__(self, int, Union[c_int, int], /) -> None \ + # N: def __setitem__(self, slice, List[Union[c_int, int]], /) -> None for x in a: reveal_type(x) # N: Revealed type is "builtins.int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayCustomElementType] import ctypes @@ -38,13 +39,13 @@ reveal_type(mya[1:3]) # N: Revealed type is "builtins.list[__main__.MyCInt]" mya[0] = 42 mya[1] = ctypes.c_int(42) # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "c_int" \ # N: Possible overload variants: \ - # N: def __setitem__(self, int, Union[MyCInt, int]) -> None \ - # N: def __setitem__(self, slice, List[Union[MyCInt, int]]) -> None + # N: def __setitem__(self, int, Union[MyCInt, int], /) -> None \ + # N: def __setitem__(self, slice, List[Union[MyCInt, int]], /) -> None mya[2] = MyCInt(42) mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ - # N: def __setitem__(self, int, 
Union[MyCInt, int]) -> None \ - # N: def __setitem__(self, slice, List[Union[MyCInt, int]]) -> None + # N: def __setitem__(self, int, Union[MyCInt, int], /) -> None \ + # N: def __setitem__(self, slice, List[Union[MyCInt, int]], /) -> None for myx in mya: reveal_type(myx) # N: Revealed type is "__main__.MyCInt" @@ -52,6 +53,7 @@ myu: Union[ctypes.Array[ctypes.c_int], List[str]] for myi in myu: reveal_type(myi) # N: Revealed type is "Union[builtins.int, builtins.str]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayUnionElementType] import ctypes @@ -71,11 +73,12 @@ mya[1] = ctypes.c_uint(42) mya[2] = MyCInt(42) mya[3] = b"bytes" # E: No overload variant of "__setitem__" of "Array" matches argument types "int", "bytes" \ # N: Possible overload variants: \ - # N: def __setitem__(self, int, Union[MyCInt, int, c_uint]) -> None \ - # N: def __setitem__(self, slice, List[Union[MyCInt, int, c_uint]]) -> None + # N: def __setitem__(self, int, Union[MyCInt, int, c_uint], /) -> None \ + # N: def __setitem__(self, slice, List[Union[MyCInt, int, c_uint]], /) -> None for myx in mya: reveal_type(myx) # N: Revealed type is "Union[__main__.MyCInt, builtins.int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharArrayAttrs] import ctypes @@ -84,6 +87,7 @@ ca = (ctypes.c_char * 4)(b'a', b'b', b'c', b'\x00') reveal_type(ca.value) # N: Revealed type is "builtins.bytes" reveal_type(ca.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharPArrayDoesNotCrash] import ctypes @@ -91,6 +95,7 @@ import ctypes # The following line used to crash with "Could not find builtin symbol 'NoneType'" ca = (ctypes.c_char_p * 0)() [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesWcharArrayAttrs] import ctypes @@ -99,6 +104,7 @@ wca = (ctypes.c_wchar * 4)('a', 'b', 'c', '\x00') reveal_type(wca.value) # N: Revealed type is "builtins.str" wca.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_wchar" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesCharUnionArrayAttrs] import ctypes @@ -108,6 +114,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_wchar]] reveal_type(cua.value) # N: Revealed type is "Union[builtins.bytes, builtins.str]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_wchar]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyUnionArrayAttrs] import ctypes @@ -117,6 +124,7 @@ caa: ctypes.Array[Union[ctypes.c_char, Any]] reveal_type(caa.value) # N: Revealed type is "Union[builtins.bytes, Any]" reveal_type(caa.raw) # N: Revealed type is "builtins.bytes" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherUnionArrayAttrs] import ctypes @@ -126,6 +134,7 @@ cua: ctypes.Array[Union[ctypes.c_char, ctypes.c_int]] cua.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "Union[c_char, c_int]" cua.raw # E: Array attribute "raw" is only available with element type "c_char", not "Union[c_char, c_int]" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesAnyArrayAttrs] import ctypes @@ -134,6 +143,7 @@ aa: ctypes.Array[Any] reveal_type(aa.value) # N: Revealed type is "Any" reveal_type(aa.raw) # N: Revealed type is "builtins.bytes" [builtins 
fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesOtherArrayAttrs] import ctypes @@ -142,6 +152,7 @@ oa = (ctypes.c_int * 4)(1, 2, 3, 4) oa.value # E: Array attribute "value" is only available with element type "c_char" or "c_wchar", not "c_int" oa.raw # E: Array attribute "raw" is only available with element type "c_char", not "c_int" [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] [case testCtypesArrayConstructorStarargs] import ctypes @@ -154,6 +165,7 @@ reveal_type(intarr4(*int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_ reveal_type(intarr4(*c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +[typing fixtures/typing-medium.pyi] float_values = [1.0, 2.0, 3.0, 4.0] intarr4(*float_values) # E: Array constructor argument 1 of type "List[float]" is not convertible to the array element type "Iterable[c_int]" @@ -167,3 +179,4 @@ x = {"a": 1, "b": 2} intarr4(**x) [builtins fixtures/floatdict.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-custom-plugin.test b/test-data/unit/check-custom-plugin.test index ee19113f000f..d7beea0390e7 100644 --- a/test-data/unit/check-custom-plugin.test +++ b/test-data/unit/check-custom-plugin.test @@ -163,6 +163,15 @@ reveal_type(f()) # N: Revealed type is "builtins.int" \[mypy] plugins=/test-data/unit/plugins/customentry.py:register +[case testCustomPluginEntryPointFileTrailingComma] +# flags: --config-file tmp/mypy.ini +def f() -> str: ... +reveal_type(f()) # N: Revealed type is "builtins.int" +[file mypy.ini] +\[mypy] +plugins = + /test-data/unit/plugins/customentry.py:register, + [case testCustomPluginEntryPoint] # flags: --config-file tmp/mypy.ini def f() -> str: ... @@ -991,3 +1000,17 @@ class Cls: [file mypy.ini] \[mypy] plugins=/test-data/unit/plugins/class_attr_hook.py + +[case testAddClassMethodPlugin] +# flags: --config-file tmp/mypy.ini +class BaseAddMethod: pass + +class MyClass(BaseAddMethod): + pass + +my_class = MyClass() +reveal_type(MyClass.foo_classmethod) # N: Revealed type is "def ()" +reveal_type(MyClass.foo_staticmethod) # N: Revealed type is "def (builtins.int) -> builtins.str" +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/add_classmethod.py diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test new file mode 100644 index 000000000000..ec87bd4757ed --- /dev/null +++ b/test-data/unit/check-dataclass-transform.test @@ -0,0 +1,453 @@ +[case testDataclassTransformReusesDataclassLogic] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." 
% (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformIsFoundInTypingExtensions] +# flags: --python-version 3.7 +from typing import Type +from typing_extensions import dataclass_transform + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Person: + name: str + age: int + + def summary(self): + return "%s is %d years old." % (self.name, self.age) + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('Jonh', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformParametersAreApplied] +# flags: --python-version 3.11 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform() +def my_dataclass(*, eq: bool, order: bool) -> Callable[[Type], Type]: + def transform(cls: Type) -> Type: + return cls + return transform + +@my_dataclass(eq=False, order=True) # E: "eq" must be True if "order" is True +class Person: + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (name: builtins.str, age: builtins.int) -> __main__.Person" +Person('John', 32) +Person('John', 21, None) # E: Too many arguments for "Person" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformParametersMustBeBoolLiterals] +# flags: --python-version 3.11 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform() +def my_dataclass(*, eq: bool = True, order: bool = False) -> Callable[[Type], Type]: + def transform(cls: Type) -> Type: + return cls + return transform +@dataclass_transform() +class BaseClass: + def __init_subclass__(cls, *, eq: bool): ... +@dataclass_transform() +class Metaclass(type): ... + +BOOL_CONSTANT = True +@my_dataclass(eq=BOOL_CONSTANT) # E: "eq" argument must be a True or False literal +class A: ... +@my_dataclass(order=not False) # E: "order" argument must be a True or False literal +class B: ... +class C(BaseClass, eq=BOOL_CONSTANT): ... # E: "eq" argument must be a True or False literal +class D(metaclass=Metaclass, order=not False): ... 
# E: "order" argument must be a True or False literal + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformDefaultParamsMustBeLiterals] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, Final + +BOOLEAN_CONSTANT = True +FINAL_BOOLEAN: Final = True + +@dataclass_transform(eq_default=BOOLEAN_CONSTANT) # E: "eq_default" argument must be a True or False literal +def foo(cls: Type) -> Type: + return cls +@dataclass_transform(eq_default=(not True)) # E: "eq_default" argument must be a True or False literal +def bar(cls: Type) -> Type: + return cls +@dataclass_transform(eq_default=FINAL_BOOLEAN) # E: "eq_default" argument must be a True or False literal +def baz(cls: Type) -> Type: + return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformUnrecognizedParamsAreErrors] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type + +BOOLEAN_CONSTANT = True + +@dataclass_transform(nonexistant=True) # E: Unrecognized dataclass_transform parameter "nonexistant" +def foo(cls: Type) -> Type: + return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + + +[case testDataclassTransformDefaultParams] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, Callable + +@dataclass_transform(eq_default=False) +def no_eq(*, order: bool = False) -> Callable[[Type], Type]: + return lambda cls: cls +@no_eq() +class Foo: ... +@no_eq(order=True) # E: "eq" must be True if "order" is True +class Bar: ... + + +@dataclass_transform(kw_only_default=True) +def always_use_kw(cls: Type) -> Type: + return cls +@always_use_kw +class Baz: + x: int +Baz(x=5) +Baz(5) # E: Too many positional arguments for "Baz" + +@dataclass_transform(order_default=True) +def ordered(*, eq: bool = True) -> Callable[[Type], Type]: + return lambda cls: cls +@ordered() +class A: + x: int +A(1) > A(2) + +@dataclass_transform(frozen_default=True) +def frozen(cls: Type) -> Type: + return cls +@frozen +class B: + x: int +b = B(x=1) +b.x = 2 # E: Property "x" defined in "B" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformDefaultsCanBeOverridden] +# flags: --python-version 3.11 +from typing import dataclass_transform, Callable, Type + +@dataclass_transform(kw_only_default=True) +def my_dataclass(*, kw_only: bool = True) -> Callable[[Type], Type]: + return lambda cls: cls + +@my_dataclass() +class KwOnly: + x: int +@my_dataclass(kw_only=False) +class KwOptional: + x: int + +KwOnly(5) # E: Too many positional arguments for "KwOnly" +KwOptional(5) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifiersDefaultsToEmpty] +# flags: --python-version 3.11 +from dataclasses import field, dataclass +from typing import dataclass_transform, Type + +@dataclass_transform() +def my_dataclass(cls: Type) -> Type: + return cls + +@my_dataclass +class Foo: + foo: int = field(kw_only=True) + +# Does not cause a type error because `dataclasses.field` is not a recognized field specifier by +# default +Foo(5) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifierRejectMalformed] +# flags: --python-version 3.11 +from typing import dataclass_transform, Any, Callable, Final, Type + +def some_type() -> Type: ... +def some_function() -> Callable[[], None]: ... 
+ +def field(*args, **kwargs): ... +def fields_tuple() -> tuple[type | Callable[..., Any], ...]: return (field,) +CONSTANT: Final = (field,) + +@dataclass_transform(field_specifiers=(some_type(),)) # E: "field_specifiers" must only contain identifiers +def bad_dataclass1() -> None: ... +@dataclass_transform(field_specifiers=(some_function(),)) # E: "field_specifiers" must only contain identifiers +def bad_dataclass2() -> None: ... +@dataclass_transform(field_specifiers=CONSTANT) # E: "field_specifiers" argument must be a tuple literal +def bad_dataclass3() -> None: ... +@dataclass_transform(field_specifiers=fields_tuple()) # E: "field_specifiers" argument must be a tuple literal +def bad_dataclass4() -> None: ... + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifierParams] +# flags: --python-version 3.11 +from typing import dataclass_transform, Any, Callable, Type, Final + +def field( + *, + init: bool = True, + kw_only: bool = False, + alias: str | None = None, + default: Any | None = None, + default_factory: Callable[[], Any] | None = None, + factory: Callable[[], Any] | None = None, +): ... +@dataclass_transform(field_specifiers=(field,)) +def my_dataclass(cls: Type) -> Type: + return cls + +B: Final = 'b_' +@my_dataclass +class Foo: + a: int = field(alias='a_') + b: int = field(alias=B) + # cannot be passed as a positional + kwonly: int = field(kw_only=True, default=0) + # Safe to omit from constructor, error to pass + noinit: int = field(init=False, default=1) + # It should be safe to call the constructor without passing any of these + unused1: int = field(default=0) + unused2: int = field(factory=lambda: 0) + unused3: int = field(default_factory=lambda: 0) + +Foo(a=5, b_=1) # E: Unexpected keyword argument "a" for "Foo" +Foo(a_=1, b_=1, noinit=1) # E: Unexpected keyword argument "noinit" for "Foo" +Foo(1, 2, 3) # E: Too many positional arguments for "Foo" +foo = Foo(1, 2, kwonly=3) +reveal_type(foo.noinit) # N: Revealed type is "builtins.int" +reveal_type(foo.unused1) # N: Revealed type is "builtins.int" +Foo(a_=5, b_=1, unused1=2, unused2=3, unused3=4) + +def some_str() -> str: ... +def some_bool() -> bool: ... +@my_dataclass +class Bad: + bad1: int = field(alias=some_str()) # E: "alias" argument to dataclass field must be a string literal + bad2: int = field(kw_only=some_bool()) # E: "kw_only" argument must be a boolean literal + +reveal_type(Foo.__dataclass_fields__) # N: Revealed type is "builtins.dict[builtins.str, Any]" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformFieldSpecifierExtraArgs] +# flags: --python-version 3.11 +from typing import dataclass_transform + +def field(extra1, *, kw_only=False, extra2=0): ... +@dataclass_transform(field_specifiers=(field,)) +def my_dataclass(cls): + return cls + +@my_dataclass +class Good: + a: int = field(5) + b: int = field(5, extra2=1) + c: int = field(5, kw_only=True) + +@my_dataclass +class Bad: + a: int = field(kw_only=True) # E: Missing positional argument "extra1" in call to "field" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformMultipleFieldSpecifiers] +# flags: --python-version 3.11 +from typing import dataclass_transform + +def field1(*, default: int) -> int: ... +def field2(*, default: str) -> str: ... 
+ +@dataclass_transform(field_specifiers=(field1, field2)) +def my_dataclass(cls): return cls + +@my_dataclass +class Foo: + a: int = field1(default=0) + b: str = field2(default='hello') + +reveal_type(Foo) # N: Revealed type is "def (a: builtins.int =, b: builtins.str =) -> __main__.Foo" +Foo() +Foo(a=1, b='bye') + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformOverloadsDecoratorOnOverload] +# flags: --python-version 3.11 +from typing import dataclass_transform, overload, Any, Callable, Type, Literal + +@overload +def my_dataclass(*, foo: str) -> Callable[[Type], Type]: ... +@overload +@dataclass_transform(frozen_default=True) +def my_dataclass(*, foo: int) -> Callable[[Type], Type]: ... +def my_dataclass(*, foo: Any) -> Callable[[Type], Type]: + return lambda cls: cls +@my_dataclass(foo="hello") +class A: + a: int +@my_dataclass(foo=5) +class B: + b: int + +reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> __main__.A" +reveal_type(B) # N: Revealed type is "def (b: builtins.int) -> __main__.B" +A(1, "hello") # E: Too many arguments for "A" +a = A(1) +a.a = 2 # E: Property "a" defined in "A" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformOverloadsDecoratorOnImpl] +# flags: --python-version 3.11 +from typing import dataclass_transform, overload, Any, Callable, Type, Literal + +@overload +def my_dataclass(*, foo: str) -> Callable[[Type], Type]: ... +@overload +def my_dataclass(*, foo: int) -> Callable[[Type], Type]: ... +@dataclass_transform(frozen_default=True) +def my_dataclass(*, foo: Any) -> Callable[[Type], Type]: + return lambda cls: cls +@my_dataclass(foo="hello") +class A: + a: int +@my_dataclass(foo=5) +class B: + b: int + +reveal_type(A) # N: Revealed type is "def (a: builtins.int) -> __main__.A" +reveal_type(B) # N: Revealed type is "def (b: builtins.int) -> __main__.B" +A(1, "hello") # E: Too many arguments for "A" +a = A(1) +a.a = 2 # E: Property "a" defined in "A" is read-only + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaBaseClass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass: + def __init_subclass__(cls, *, kw_only: bool = False): ... + +class Person(Dataclass, kw_only=True): + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (*, name: builtins.str, age: builtins.int) -> __main__.Person" +Person('Jonh', 21) # E: Too many positional arguments for "Person" +person = Person(name='John', age=32) +person.name = "John Smith" # E: Property "name" defined in "Person" is read-only + +class Contact(Person): + email: str + +reveal_type(Contact) # N: Revealed type is "def (email: builtins.str, *, name: builtins.str, age: builtins.int) -> __main__.Contact" +Contact('john@john.com', name='John', age=32) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaMetaclass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass(type): ... 
+ +class Person(metaclass=Dataclass, kw_only=True): + name: str + age: int + +reveal_type(Person) # N: Revealed type is "def (*, name: builtins.str, age: builtins.int) -> __main__.Person" +Person('Jonh', 21) # E: Too many positional arguments for "Person" +person = Person(name='John', age=32) +person.name = "John Smith" # E: Property "name" defined in "Person" is read-only + +class Contact(Person): + email: str + +reveal_type(Contact) # N: Revealed type is "def (email: builtins.str, *, name: builtins.str, age: builtins.int) -> __main__.Contact" +Contact('john@john.com', name='John', age=32) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformViaSubclassOfMetaclass] +# flags: --python-version 3.11 +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class BaseMeta(type): ... +class SubMeta(BaseMeta): ... + +# MyPy does *not* recognize this as a dataclass because the metaclass is not directly decorated with +# dataclass_transform +class Foo(metaclass=SubMeta): + foo: int + +reveal_type(Foo) # N: Revealed type is "def () -> __main__.Foo" +Foo(1) # E: Too many arguments for "Foo" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test index 6abb5597e464..4d85be391186 100644 --- a/test-data/unit/check-dataclasses.test +++ b/test-data/unit/check-dataclasses.test @@ -187,6 +187,66 @@ reveal_type(C) # N: Revealed type is "def (some_int: builtins.int, some_str: bu [builtins fixtures/dataclasses.pyi] +[case testDataclassIncompatibleOverrides] +# flags: --python-version 3.7 +from dataclasses import dataclass + +@dataclass +class Base: + foo: int + +@dataclass +class BadDerived1(Base): + def foo(self) -> int: # E: Dataclass attribute may only be overridden by another attribute \ + # E: Signature of "foo" incompatible with supertype "Base" + return 1 + +@dataclass +class BadDerived2(Base): + @property # E: Dataclass attribute may only be overridden by another attribute + def foo(self) -> int: # E: Cannot override writeable attribute with read-only property + return 2 + +@dataclass +class BadDerived3(Base): + class foo: pass # E: Dataclass attribute may only be overridden by another attribute +[builtins fixtures/dataclasses.pyi] + +[case testDataclassMultipleInheritance] +# flags: --python-version 3.7 +from dataclasses import dataclass + +class Unrelated: + foo: str + +@dataclass +class Base: + bar: int + +@dataclass +class Derived(Base, Unrelated): + pass + +d = Derived(3) +reveal_type(d.foo) # N: Revealed type is "builtins.str" +reveal_type(d.bar) # N: Revealed type is "builtins.int" +[builtins fixtures/dataclasses.pyi] + +[case testDataclassIncompatibleFrozenOverride] +# flags: --python-version 3.7 +from dataclasses import dataclass + +@dataclass(frozen=True) +class Base: + foo: int + +@dataclass(frozen=True) +class BadDerived(Base): + @property # E: Dataclass attribute may only be overridden by another attribute + def foo(self) -> int: + return 3 +[builtins fixtures/dataclasses.pyi] + [case testDataclassesFreezing] # flags: --python-version 3.7 from dataclasses import dataclass @@ -200,6 +260,28 @@ john.name = 'Ben' # E: Property "name" defined in "Person" is read-only [builtins fixtures/dataclasses.pyi] +[case testDataclassesInconsistentFreezing] +# flags: --python-version 3.7 +from dataclasses import dataclass + +@dataclass(frozen=True) +class FrozenBase: + pass + +@dataclass +class 
BadNormalDerived(FrozenBase): # E: Cannot inherit non-frozen dataclass from a frozen one + pass + +@dataclass +class NormalBase: + pass + +@dataclass(frozen=True) +class BadFrozenDerived(NormalBase): # E: Cannot inherit frozen dataclass from a non-frozen one + pass + +[builtins fixtures/dataclasses.pyi] + [case testDataclassesFields] # flags: --python-version 3.7 from dataclasses import dataclass, field @@ -550,9 +632,10 @@ class Two: S: TypeAlias = Callable[[int], str] # E: Type aliases inside dataclass definitions are not supported at runtime c = Two() -x = c.S # E: Member "S" is not assignable -reveal_type(x) # N: Revealed type is "Any" +x = c.S +reveal_type(x) # N: Revealed type is "typing._SpecialForm" [builtins fixtures/dataclasses.pyi] +[typing fixtures/typing-medium.pyi] [case testDataclassOrdering] # flags: --python-version 3.7 @@ -589,8 +672,8 @@ app1 >= app3 # flags: --python-version 3.7 from dataclasses import dataclass -@dataclass(eq=False, order=True) -class Application: # E: eq must be True if order is True +@dataclass(eq=False, order=True) # E: "eq" must be True if "order" is True +class Application: ... [builtins fixtures/dataclasses.pyi] @@ -601,7 +684,7 @@ from dataclasses import dataclass @dataclass(order=True) class Application: - def __lt__(self, other: 'Application') -> bool: # E: You may not have a custom __lt__ method when order=True + def __lt__(self, other: 'Application') -> bool: # E: You may not have a custom "__lt__" method when "order" is True ... [builtins fixtures/dataclasses.pyi] @@ -1283,10 +1366,10 @@ from dataclasses import dataclass class A: foo: int -@dataclass +@dataclass(frozen=True) class B(A): - @property - def foo(self) -> int: pass # E: Signature of "foo" incompatible with supertype "A" + @property # E: Dataclass attribute may only be overridden by another attribute + def foo(self) -> int: pass reveal_type(B) # N: Revealed type is "def (foo: builtins.int) -> __main__.B" @@ -1559,7 +1642,7 @@ A(a=func).a = func # E: Property "a" defined in "A" is read-only # flags: --python-version 3.7 from dataclasses import dataclass -def foo(): +def foo() -> None: @dataclass class Foo: foo: int @@ -1737,10 +1820,10 @@ c.x # E: "C" has no attribute "x" [case testDataclassCheckTypeVarBounds] # flags: --python-version 3.7 from dataclasses import dataclass -from typing import Protocol, Dict, TypeVar, Generic +from typing import ClassVar, Protocol, Dict, TypeVar, Generic class DataclassProtocol(Protocol): - __dataclass_fields__: Dict + __dataclass_fields__: ClassVar[Dict] T = TypeVar("T", bound=DataclassProtocol) @@ -1796,3 +1879,125 @@ t: Two reveal_type(t.__match_args__) # E: "Two" has no attribute "__match_args__" \ # N: Revealed type is "Any" [builtins fixtures/dataclasses.pyi] + +[case testFinalInDataclass] +from dataclasses import dataclass +from typing import Final + +@dataclass +class FirstClass: + FIRST_CONST: Final = 3 # OK + +@dataclass +class SecondClass: + SECOND_CONST: Final = FirstClass.FIRST_CONST # E: Need type argument for Final[...] with non-literal default in dataclass + +reveal_type(FirstClass().FIRST_CONST) # N: Revealed type is "Literal[3]?" +FirstClass().FIRST_CONST = 42 # E: Cannot assign to final attribute "FIRST_CONST" +reveal_type(SecondClass().SECOND_CONST) # N: Revealed type is "Literal[3]?" 
+SecondClass().SECOND_CONST = 42 # E: Cannot assign to final attribute "SECOND_CONST" +[builtins fixtures/dataclasses.pyi] + +[case testDataclassFieldsProtocol] +# flags: --python-version 3.9 +from dataclasses import dataclass +from typing import Any, Protocol + +class ConfigProtocol(Protocol): + __dataclass_fields__: dict[str, Any] + +def takes_cp(cp: ConfigProtocol): ... + +@dataclass +class MyDataclass: + x: int = 3 + +takes_cp(MyDataclass) +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTypeAnnotationAliasUpdated] +import a +[file a.py] +from dataclasses import dataclass +from b import B + +@dataclass +class D: + x: B + +reveal_type(D) # N: Revealed type is "def (x: builtins.list[b.C]) -> a.D" +[file b.py] +from typing import List +import a +class CC: ... +class C(CC): ... +B = List[C] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassSelfType] +# flags: --strict-optional +from dataclasses import dataclass +from typing import Self, TypeVar, Generic, Optional + +T = TypeVar("T") + +@dataclass +class LinkedList(Generic[T]): + value: T + next: Optional[Self] = None + + def meth(self) -> None: + reveal_type(self.next) # N: Revealed type is "Union[Self`0, None]" + +l_int: LinkedList[int] = LinkedList(1, LinkedList("no", None)) # E: Argument 1 to "LinkedList" has incompatible type "str"; expected "int" + +@dataclass +class SubLinkedList(LinkedList[int]): ... + +lst = SubLinkedList(1, LinkedList(2)) # E: Argument 2 to "SubLinkedList" has incompatible type "LinkedList[int]"; expected "Optional[SubLinkedList]" +reveal_type(lst.next) # N: Revealed type is "Union[__main__.SubLinkedList, None]" +reveal_type(SubLinkedList) # N: Revealed type is "def (value: builtins.int, next: Union[__main__.SubLinkedList, None] =) -> __main__.SubLinkedList" +[builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnNestedGenericCallable] +from dataclasses import dataclass +from typing import Generic, TypeVar, Callable + +T = TypeVar('T') +R = TypeVar('R') +X = TypeVar('X') + +@dataclass +class Box(Generic[T]): + inner: T + +@dataclass +class Cont(Generic[R]): + run: Box[Callable[[X], R]] + +def const_two(x: T) -> str: + return "two" + +c = Cont(Box(const_two)) +reveal_type(c) # N: Revealed type is "__main__.Cont[builtins.str]" +[builtins fixtures/dataclasses.pyi] + +[case testNoCrashOnSelfWithForwardRefGenericDataclass] +from typing import Generic, Sequence, TypeVar, Self +from dataclasses import dataclass + +_T = TypeVar('_T', bound="Foo") + +@dataclass +class Foo: + foo: int + +@dataclass +class Element(Generic[_T]): + elements: Sequence[Self] + +@dataclass +class Bar(Foo): ... +e: Element[Bar] +reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-default-plugin.test b/test-data/unit/check-default-plugin.test deleted file mode 100644 index 4d8844d254d1..000000000000 --- a/test-data/unit/check-default-plugin.test +++ /dev/null @@ -1,84 +0,0 @@ --- Test cases for the default plugin --- --- Note that we have additional test cases in pythoneval.test (that use real typeshed stubs). 
- - -[case testContextManagerWithGenericFunction] -from contextlib import contextmanager -from typing import TypeVar, Iterator - -T = TypeVar('T') - -@contextmanager -def yield_id(item: T) -> Iterator[T]: - yield item - -reveal_type(yield_id) # N: Revealed type is "def [T] (item: T`-1) -> contextlib.GeneratorContextManager[T`-1]" - -with yield_id(1) as x: - reveal_type(x) # N: Revealed type is "builtins.int" - -f = yield_id -def g(x, y): pass -f = g # E: Incompatible types in assignment (expression has type "Callable[[Any, Any], Any]", variable has type "Callable[[T], GeneratorContextManager[T]]") -[typing fixtures/typing-medium.pyi] -[builtins fixtures/tuple.pyi] - -[case testAsyncContextManagerWithGenericFunction] -# flags: --python-version 3.7 -from contextlib import asynccontextmanager -from typing import TypeVar, AsyncGenerator - -T = TypeVar('T') - -@asynccontextmanager -async def yield_id(item: T) -> AsyncGenerator[T, None]: - yield item - -reveal_type(yield_id) # N: Revealed type is "def [T] (item: T`-1) -> typing.AsyncContextManager[T`-1]" - -async def f() -> None: - async with yield_id(1) as x: - reveal_type(x) # N: Revealed type is "builtins.int" -[typing fixtures/typing-async.pyi] -[builtins fixtures/tuple.pyi] - -[case testContextManagerReturnsGenericFunction] -import contextlib -from typing import Callable -from typing import Generator -from typing import Iterable -from typing import TypeVar - -TArg = TypeVar('TArg') -TRet = TypeVar('TRet') - -@contextlib.contextmanager -def _thread_mapper(maxsize: int) -> Generator[ - Callable[[Callable[[TArg], TRet], Iterable[TArg]], Iterable[TRet]], - None, None, -]: - # defined inline as there isn't a builtins.map fixture - def my_map(f: Callable[[TArg], TRet], it: Iterable[TArg]) -> Iterable[TRet]: - for x in it: - yield f(x) - - yield my_map - -def identity(x: int) -> int: return x - -with _thread_mapper(1) as m: - lst = list(m(identity, [2, 3])) - reveal_type(lst) # N: Revealed type is "builtins.list[builtins.int]" -[typing fixtures/typing-medium.pyi] -[builtins fixtures/list.pyi] - -[case testContextManagerWithUnspecifiedArguments] -from contextlib import contextmanager -from typing import Callable, Iterator - -c: Callable[..., Iterator[int]] -reveal_type(c) # N: Revealed type is "def (*Any, **Any) -> typing.Iterator[builtins.int]" -reveal_type(contextmanager(c)) # N: Revealed type is "def (*Any, **Any) -> contextlib.GeneratorContextManager[builtins.int]" -[typing fixtures/typing-medium.pyi] -[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-dynamic-typing.test b/test-data/unit/check-dynamic-typing.test index 7b016c342e95..dd4cc1579639 100644 --- a/test-data/unit/check-dynamic-typing.test +++ b/test-data/unit/check-dynamic-typing.test @@ -47,6 +47,10 @@ class B: pass [case testCallingFunctionWithDynamicArgumentTypes] from typing import Any + +def f(x: Any) -> 'A': + pass + a, b = None, None # type: (A, B) if int(): @@ -61,15 +65,16 @@ if int(): if int(): a = f(f) -def f(x: Any) -> 'A': - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] [case testCallingWithDynamicReturnType] from typing import Any + +def f(x: 'A') -> Any: + pass + a, b = None, None # type: (A, B) a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" @@ -77,9 +82,6 @@ a = f(b) # E: Argument 1 to "f" has incompatible type "B"; expected "A" a = f(a) b = f(a) -def f(x: 'A') -> Any: - pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -145,6 +147,7 @@ class int: pass class type: pass class function: pass 
class str: pass +class dict: pass [case testBinaryOperationsWithDynamicAsRightOperand] from typing import Any @@ -217,6 +220,7 @@ class int: pass class type: pass class function: pass class str: pass +class dict: pass [case testDynamicWithUnaryExpressions] from typing import Any @@ -283,6 +287,8 @@ class A: pass from typing import Any, cast class A: pass class B: pass +def f() -> None: pass + d = None # type: Any a = None # type: A b = None # type: B @@ -294,10 +300,15 @@ if int(): b = cast(Any, d) if int(): a = cast(Any, f()) -def f() -> None: pass - [case testCompatibilityOfDynamicWithOtherTypes] from typing import Any, Tuple + +def g(a: 'A') -> None: + pass + +class A: pass +class B: pass + d = None # type: Any t = None # type: Tuple[A, A] # TODO: callable types, overloaded functions @@ -308,12 +319,6 @@ d = g d = A t = d f = d - -def g(a: 'A') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -362,6 +367,8 @@ a = None # type: A g = None # type: Callable[[], None] h = None # type: Callable[[A], None] +def f(x): pass + f() # E: Missing positional argument "x" in call to "f" f(x, x) # E: Too many arguments for "f" if int(): @@ -373,8 +380,6 @@ if int(): if int(): h = f -def f(x): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDifferentArgCounts] @@ -384,6 +389,9 @@ g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] a = None # type: A +def f0(): pass +def f2(x, y): pass + if int(): g1 = f0 # E: Incompatible types in assignment (expression has type "Callable[[], Any]", variable has type "Callable[[A], None]") if int(): @@ -400,16 +408,18 @@ if int(): f0() f2(a, a) -def f0(): pass - -def f2(x, y): pass - class A: pass [case testImplicitGlobalFunctionSignatureWithDefaultArgs] from typing import Callable +class A: pass +class B: pass + a, b = None, None # type: (A, B) +def f01(x = b): pass +def f13(x, y = b, z = b): pass + g0 = None # type: Callable[[], None] g1 = None # type: Callable[[A], None] g2 = None # type: Callable[[A, A], None] @@ -443,11 +453,6 @@ if int(): if int(): g3 = f13 -def f01(x = b): pass -def f13(x, y = b, z = b): pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testSkipTypeCheckingWithImplicitSignature] @@ -550,6 +555,10 @@ f(o, o, o) [case testInitMethodWithImplicitSignature] from typing import Callable + +class A: + def __init__(self, a, b): pass + f1 = None # type: Callable[[A], A] f2 = None # type: Callable[[A, A], A] a = None # type: A @@ -562,20 +571,14 @@ A(a, a) if int(): f2 = A -class A: - def __init__(self, a, b): pass - [case testUsingImplicitTypeObjectWithIs] - -t = None # type: type -t = A -t = B - class A: pass class B: def __init__(self): pass - +t = None # type: type +t = A +t = B -- Type compatibility -- ------------------ diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 039ddd1621cd..80a7ca7ff99f 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -482,13 +482,13 @@ W.c [typing fixtures/typing-medium.pyi] [out] main:2: error: Too few arguments for Enum() -main:3: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:3: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy to determine Enum members main:4: error: Too many arguments for Enum() -main:5: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:5: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy 
to determine Enum members main:5: error: Name "foo" is not defined -main:7: error: Enum() expects a string, tuple, list or dict literal as the second argument +main:7: error: Second argument of Enum() must be string, tuple, list or dict literal for mypy to determine Enum members main:8: error: Too few arguments for IntEnum() -main:9: error: IntEnum() expects a string, tuple, list or dict literal as the second argument +main:9: error: Second argument of IntEnum() must be string, tuple, list or dict literal for mypy to determine Enum members main:10: error: Too many arguments for IntEnum() main:11: error: Enum() needs at least one item main:12: error: Enum() needs at least one item @@ -549,7 +549,7 @@ reveal_type(list(Color)) # N: Revealed type is "builtins.list[__main__.Color]" [case testEnumWorkWithForward] from enum import Enum -a: E = E.x +a: E = E.x # type: ignore[used-before-def] class E(Enum): x = 1 y = 2 @@ -2100,3 +2100,30 @@ class Some: class A(Some, Enum): __labels__ = {1: "1"} [builtins fixtures/dict.pyi] + +[case testEnumWithPartialTypes] +from enum import Enum + +class Mixed(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = None + + def check(self) -> None: + reveal_type(Mixed.a.value) # N: Revealed type is "builtins.list[Any]" + reveal_type(Mixed.b.value) # N: Revealed type is "None" + + # Inferring Any here instead of a union seems to be a deliberate + # choice; see the testEnumValueInhomogenous case above. + reveal_type(self.value) # N: Revealed type is "Any" + + for field in Mixed: + reveal_type(field.value) # N: Revealed type is "Any" + if field.value is None: + pass + +class AllPartialList(Enum): + a = [] # E: Need type annotation for "a" (hint: "a: List[] = ...") + b = [] # E: Need type annotation for "b" (hint: "b: List[] = ...") + + def check(self) -> None: + reveal_type(self.value) # N: Revealed type is "builtins.list[Any]" diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index f1a6f3c77ada..8b3567ab7cf6 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -77,8 +77,8 @@ a = 'x'.foobar(b) # type: ignore[xyz, w, attr-defined] # E: Name "b" is not de # N: Error code "name-defined" not covered by "type: ignore" comment [case testErrorCodeIgnoreMultiple2] -a = 'x'.foobar(b) # type: int # type: ignore[name-defined, attr-defined] -b = 'x'.foobar(b) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] \ +a = 'x'.foobar(c) # type: int # type: ignore[name-defined, attr-defined] +b = 'x'.foobar(c) # type: int # type: ignore[name-defined, xyz] # E: "str" has no attribute "foobar" [attr-defined] \ # N: Error code "attr-defined" not covered by "type: ignore" comment [case testErrorCodeWarnUnusedIgnores1] @@ -180,7 +180,9 @@ import nostub # type: ignore[import] from defusedxml import xyz # type: ignore[import] [case testErrorCodeBadIgnore] -import nostub # type: ignore xyz # E: Invalid "type: ignore" comment [syntax] +import nostub # type: ignore xyz # E: Invalid "type: ignore" comment [syntax] \ + # E: Cannot find implementation or library stub for module named "nostub" [import] \ + # N: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports import nostub # type: ignore[ # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo # E: Invalid "type: ignore" comment [syntax] import nostub # type: ignore[foo, # E: Invalid "type: ignore" comment [syntax] @@ -207,6 +209,8 @@ def f(x, # 
type: int # type: ignore[ pass [out] main:2: error: Invalid "type: ignore" comment [syntax] +main:2: error: Cannot find implementation or library stub for module named "nostub" [import] +main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports main:3: error: Invalid "type: ignore" comment [syntax] main:4: error: Invalid "type: ignore" comment [syntax] main:5: error: Invalid "type: ignore" comment [syntax] @@ -241,7 +245,8 @@ x: f # E: Function "__main__.f" is not valid as a type [valid-type] \ # N: Perhaps you need "Callable[...]" or a callback protocol? import sys -y: sys # E: Module "sys" is not valid as a type [valid-type] +y: sys # E: Module "sys" is not valid as a type [valid-type] \ + # N: Perhaps you meant to use a protocol matching the module structure? z: y # E: Variable "__main__.y" is not valid as a type [valid-type] \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases [builtins fixtures/tuple.pyi] @@ -250,7 +255,7 @@ z: y # E: Variable "__main__.y" is not valid as a type [valid-type] \ from typing import TypeVar T = TypeVar('T') -def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same Typevar [type-var] +def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar [type-var] x = f() # E: Need type annotation for "x" [var-annotated] y = [] # E: Need type annotation for "y" (hint: "y: List[] = ...") [var-annotated] [builtins fixtures/list.pyi] @@ -450,11 +455,15 @@ class E(TypedDict): y: int a: D = {'x': ''} # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -b: D = {'y': ''} # E: Extra key "y" for TypedDict "D" [typeddict-item] +b: D = {'y': ''} # E: Missing key "x" for TypedDict "D" [typeddict-item] \ + # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] c = D(x=0) if int() else E(x=0, y=0) c = {} # E: Expected TypedDict key "x" but found no keys [typeddict-item] +d: D = {'x': '', 'y': 1} # E: Extra key "y" for TypedDict "D" [typeddict-unknown-key] \ + # E: Incompatible types (expression has type "str", TypedDict item "x" has type "int") [typeddict-item] -a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-item] + +a['y'] = 1 # E: TypedDict "D" has no key "y" [typeddict-unknown-key] a['x'] = 'x' # E: Value of "x" has incompatible type "str"; expected "int" [typeddict-item] a['y'] # E: TypedDict "D" has no key "y" [typeddict-item] [builtins fixtures/dict.pyi] @@ -467,12 +476,21 @@ class A(TypedDict): two_commonparts: int a: A = {'one_commonpart': 1, 'two_commonparts': 2} -a['other_commonpart'] = 3 # type: ignore[typeddict-item] +a['other_commonpart'] = 3 # type: ignore[typeddict-unknown-key] +not_exist = a['not_exist'] # type: ignore[typeddict-item] +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testErrorCodeTypedDictSubCodeIgnore] +from typing_extensions import TypedDict +class D(TypedDict): + x: int +d: D = {'x': 1, 'y': 2} # type: ignore[typeddict-item] [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] [case testErrorCodeCannotDetermineType] -y = x # E: Cannot determine type of "x" [has-type] +y = x # E: Cannot determine type of "x" [has-type] # E: Name "x" is used before definition [used-before-def] reveal_type(y) # N: Revealed type is "Any" x = None @@ -628,9 +646,6 @@ def g() -> int: [case testErrorCodeIgnoreNamedDefinedNote] x: List[int] # type: ignore[name-defined] -[case 
testErrorCodeIgnoreMiscNote] -x: [int] # type: ignore[misc] - [case testErrorCodeProtocolProblemsIgnore] from typing_extensions import Protocol @@ -665,7 +680,7 @@ class A: def g(self: A) -> None: pass -A.f = g # E: Cannot assign to a method [assignment] +A.f = g # E: Cannot assign to a method [method-assign] [case testErrorCodeDefinedHereNoteIgnore] import m @@ -801,12 +816,15 @@ from typing_extensions import TypedDict Foo = TypedDict("Bar", {}) # E: First argument "Bar" to TypedDict() does not match variable name "Foo" [name-match] [builtins fixtures/dict.pyi] + [case testTruthyBool] # flags: --enable-error-code truthy-bool -from typing import List, Union +from typing import List, Union, Any class Foo: pass +class Bar: + pass foo = Foo() if foo: # E: "__main__.foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] @@ -834,25 +852,49 @@ if good_union: if not good_union: pass -bad_union: Union[Foo, object] = Foo() -if bad_union: # E: "__main__.bad_union" has type "Union[Foo, object]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +bad_union: Union[Foo, Bar] = Foo() +if bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] pass -if not bad_union: # E: "__main__.bad_union" has type "object" which does not implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] +if not bad_union: # E: "__main__.bad_union" has type "Union[Foo, Bar]" of which no members implement __bool__ or __len__ so it could always be true in boolean context [truthy-bool] pass -def f(): - pass -if f: # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-bool] +# 'object' is special and is treated as potentially falsy +obj: object = Foo() +if obj: pass -if not f: # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-bool] +if not obj: pass -conditional_result = 'foo' if f else 'bar' # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-bool] lst: List[int] = [] if lst: pass + +a: Any +if a: + pass + +any_or_object: Union[object, Any] +if any_or_object: + pass [builtins fixtures/list.pyi] +[case testTruthyFunctions] +# flags: --strict-optional +def f(): + pass +if f: # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + pass +if not f: # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + pass +conditional_result = 'foo' if f else 'bar' # E: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] + +[case testTruthyIterable] +# flags: --strict-optional --enable-error-code truthy-iterable +from typing import Iterable +def func(var: Iterable[str]) -> None: + if var: # E: "var" has type "Iterable[str]" which can always be true in boolean context. Consider using "Collection[str]" instead. [truthy-iterable] + ... 
+ [case testNoOverloadImplementation] from typing import overload @@ -916,3 +958,96 @@ def f(d: D, s: str) -> None: d[s] # type: ignore[literal-required] [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testRecommendErrorCode] +# type: ignore[whatever] # E: type ignore with error code is not supported for modules; use `# mypy: disable-error-code=...` [syntax] +1 + "asdf" + +[case testShowErrorCodesInConfig] +# flags: --config-file tmp/mypy.ini +# Test 'show_error_codes = True' in config doesn't raise an exception +var: int = "" # E: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] + +[file mypy.ini] +\[mypy] +show_error_codes = True + +[case testErrorCodeUnsafeSuper_no_empty] +# flags: --strict-optional +from abc import abstractmethod + +class Base: + @abstractmethod + def meth(self) -> int: + raise NotImplementedError() +class Sub(Base): + def meth(self) -> int: + return super().meth() # E: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe [safe-super] +[builtins fixtures/exception.pyi] + +[case testDedicatedErrorCodeForEmpty_no_empty] +# flags: --strict-optional +from typing import Optional +def foo() -> int: ... # E: Missing return statement [empty-body] +def bar() -> None: ... +# This is inconsistent with how --warn-no-return behaves in general +# but we want to minimize fallout of finally handling empty bodies. +def baz() -> Optional[int]: ... # OK + +[case testDedicatedErrorCodeTypeAbstract] +import abc +from typing import TypeVar, Type + +class C(abc.ABC): + @abc.abstractmethod + def foo(self) -> None: ... + +T = TypeVar("T") +def test(tp: Type[T]) -> T: ... +test(C) # E: Only concrete class can be given where "Type[C]" is expected [type-abstract] + +class D(C): + @abc.abstractmethod + def bar(self) -> None: ... 
+cls: Type[C] = D # E: Can only assign concrete classes to a variable of type "Type[C]" [type-abstract] + +[case testUncheckedAnnotationCodeShown] +def f(): + x: int = "no" # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs [annotation-unchecked] + +[case testUncheckedAnnotationSuppressed] +# flags: --disable-error-code=annotation-unchecked +def f(): + x: int = "no" # No warning here + +[case testMethodAssignmentSuppressed] +# flags: --disable-error-code=method-assign +class A: + def f(self) -> None: pass + def g(self) -> None: pass + +def h(self: A) -> None: pass + +A.f = h +# This actually works at runtime, but there is no way to express this in current type system +A.f = A().g # E: Incompatible types in assignment (expression has type "Callable[[], None]", variable has type "Callable[[A], None]") [assignment] + +[case testMethodAssignCoveredByAssignmentIgnore] +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # type: ignore[assignment] + +[case testMethodAssignCoveredByAssignmentFlag] +# flags: --disable-error-code=assignment +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # OK + +[case testMethodAssignCoveredByAssignmentUnused] +# flags: --warn-unused-ignores +class A: + def f(self) -> None: pass +def h(self: A) -> None: pass +A.f = h # type: ignore[assignment] # E: Unused "type: ignore" comment, use narrower [method-assign] instead of [assignment] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 577e71d78482..49a3f0d4aaa7 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -692,6 +692,7 @@ tmp/m.py:8: error: Invalid index type "int" for "A"; expected type "str" [case testDivmod] +# flags: --disable-error-code=used-before-def from typing import Tuple, Union, SupportsInt _Decimal = Union[Decimal, int] class Decimal(SupportsInt): @@ -857,8 +858,8 @@ a[b] a[c] a[1] # E: No overload variant of "__getitem__" of "A" matches argument type "int" \ # N: Possible overload variants: \ - # N: def __getitem__(self, B) -> int \ - # N: def __getitem__(self, C) -> str + # N: def __getitem__(self, B, /) -> int \ + # N: def __getitem__(self, C, /) -> str i, s = None, None # type: (int, str) if int(): @@ -952,11 +953,54 @@ y: Gen[Literal[1]] = assert_type(Gen(1), Gen[Literal[1]]) [builtins fixtures/tuple.pyi] +[case testAssertTypeUncheckedFunction] +from typing import assert_type +from typing_extensions import Literal +def f(): + x = 42 + assert_type(x, Literal[42]) +[out] +main:5: error: Expression is of type "Any", not "Literal[42]" +main:5: note: "assert_type" expects everything to be "Any" in unchecked functions +[builtins fixtures/tuple.pyi] + +[case testAssertTypeUncheckedFunctionWithUntypedCheck] +# flags: --check-untyped-defs +from typing import assert_type +from typing_extensions import Literal +def f(): + x = 42 + assert_type(x, Literal[42]) +[out] +main:6: error: Expression is of type "int", not "Literal[42]" +[builtins fixtures/tuple.pyi] + +[case testAssertTypeNoPromoteUnion] +from typing import Union, assert_type + +Scalar = Union[int, bool, bytes, bytearray] + + +def reduce_it(s: Scalar) -> Scalar: + return s + +assert_type(reduce_it(True), Scalar) +[builtins fixtures/tuple.pyi] + -- None return type -- ---------------- [case testNoneReturnTypeBasics] +def f() -> None: + pass + +class A: + def g(self, x: object) -> None: + pass + def __call__(self) -> None: + pass + a, o = None, None # type: 
(A, object) if int(): a = f() # E: "f" does not return a value @@ -970,40 +1014,30 @@ A().g(f()) # E: "f" does not return a value x: A = f() # E: "f" does not return a value f() A().g(a) - -def f() -> None: - pass - -class A: - def g(self, x: object) -> None: - pass - def __call__(self) -> None: - pass [builtins fixtures/tuple.pyi] [case testNoneReturnTypeWithStatements] import typing -if f(): # Fail +def f() -> None: pass + +if f(): # E: "f" does not return a value pass -elif f(): # Fail +elif f(): # E: "f" does not return a value pass -while f(): # Fail +while f(): # E: "f" does not return a value pass def g() -> object: - return f() # Fail -raise f() # Fail - -def f() -> None: pass + return f() # E: "f" does not return a value +raise f() # E: "f" does not return a value [builtins fixtures/exception.pyi] -[out] -main:2: error: "f" does not return a value -main:4: error: "f" does not return a value -main:6: error: "f" does not return a value -main:9: error: "f" does not return a value -main:10: error: "f" does not return a value [case testNoneReturnTypeWithExpressions] from typing import cast + +def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': pass + a = None # type: A [f()] # E: "f" does not return a value f() + a # E: "f" does not return a value @@ -1012,15 +1046,16 @@ f() == a # E: "f" does not return a value a != f() # E: "f" does not return a value cast(A, f()) f().foo # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/list.pyi] [case testNoneReturnTypeWithExpressions2] import typing +def f() -> None: pass +class A: + def __add__(self, x: 'A') -> 'A': + pass + a, b = None, None # type: (A, bool) f() in a # E: "f" does not return a value # E: Unsupported right operand type for in ("A") a < f() # E: "f" does not return a value @@ -1030,11 +1065,6 @@ a in f() # E: "f" does not return a value not f() # E: "f" does not return a value f() and b # E: "f" does not return a value b or f() # E: "f" does not return a value - -def f() -> None: pass -class A: - def __add__(self, x: 'A') -> 'A': - pass [builtins fixtures/bool.pyi] @@ -1390,19 +1420,13 @@ z = x.append(y) if bool() else x.append(y) # E: "append" of "list" does not retu from typing import cast class A: def __add__(self, a: 'A') -> 'A': pass -a = None # type: A -None + a # Fail -f + a # Fail -a + f # Fail -cast(A, f) - def f() -> None: pass -[out] -main:5: error: Unsupported left operand type for + ("None") -main:6: error: Unsupported left operand type for + ("Callable[[], None]") -main:7: error: Unsupported operand types for + ("A" and "Callable[[], None]") - +a = None # type: A +None + a # E: Unsupported left operand type for + ("None") +f + a # E: Unsupported left operand type for + ("Callable[[], None]") +a + f # E: Unsupported operand types for + ("A" and "Callable[[], None]") +cast(A, f) [case testOperatorMethodWithInvalidArgCount] a = None # type: A @@ -1690,7 +1714,7 @@ reveal_type = 1 [case testRevealForward] def f() -> None: reveal_type(x) -x = 1 + 1 +x = 1 + int() [out] main:2: note: Revealed type is "builtins.int" @@ -1961,6 +1985,24 @@ class B: A() == B() # E: Unsupported operand types for == ("A" and "B") [builtins fixtures/bool.pyi] +[case testStrictEqualitySequenceAndCustomEq] +# flags: --strict-equality +from typing import Tuple + +class C: pass +class D: + def __eq__(self, other): return True + +a = [C()] +b = [D()] +a == b +b == a +t1: Tuple[C, ...] +t2: Tuple[D, ...] 
+t1 == t2 +t2 == t1 +[builtins fixtures/bool.pyi] + [case testCustomEqCheckStrictEqualityOKInstance] # flags: --strict-equality class A: @@ -2179,6 +2221,32 @@ int == y y == int [builtins fixtures/bool.pyi] +[case testStrictEqualityAndEnumWithCustomEq] +# flags: --strict-equality +from enum import Enum + +class E1(Enum): + X = 0 + Y = 1 + +class E2(Enum): + X = 0 + Y = 1 + + def __eq__(self, other: object) -> bool: + return bool() + +E1.X == E1.Y # E: Non-overlapping equality check (left operand type: "Literal[E1.X]", right operand type: "Literal[E1.Y]") +E2.X == E2.Y +[builtins fixtures/bool.pyi] + +[case testStrictEqualityWithBytesContains] +# flags: --strict-equality +data = b"xy" +b"x" in data +[builtins fixtures/primitives.pyi] +[typing fixtures/typing-full.pyi] + [case testUnimportedHintAny] def f(x: Any) -> None: # E: Name "Any" is not defined \ # N: Did you forget to import it from "typing"? (Suggestion: "from typing import Any") diff --git a/test-data/unit/check-fastparse.test b/test-data/unit/check-fastparse.test index 848d91b1659d..f172a9727d49 100644 --- a/test-data/unit/check-fastparse.test +++ b/test-data/unit/check-fastparse.test @@ -106,6 +106,7 @@ class C: [builtins fixtures/property.pyi] [case testFastParsePerArgumentAnnotations] +# flags: --implicit-optional class A: pass class B: pass @@ -130,6 +131,7 @@ def f(a, # type: A [out] [case testFastParsePerArgumentAnnotationsWithReturn] +# flags: --implicit-optional class A: pass class B: pass diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 5b5d49c80708..0ac39ebf9c10 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -366,6 +366,22 @@ def f() -> NoReturn: # E: Implicit return in function which does not return non_trivial_function = 1 [builtins fixtures/dict.pyi] +[case testNoReturnImplicitReturnCheckInDeferredNode] +# flags: --warn-no-return +from typing import NoReturn + +def exit() -> NoReturn: ... 
+ +def force_forward_reference() -> int: + return 4 + +def f() -> NoReturn: + x + exit() + +x = force_forward_reference() +[builtins fixtures/exception.pyi] + [case testNoReturnNoWarnNoReturn] # flags: --warn-no-return from mypy_extensions import NoReturn @@ -827,6 +843,7 @@ standard.f(None) [file mypy.ini] \[mypy] strict_optional = False +implicit_optional = true \[mypy-optional] strict_optional = True @@ -846,6 +863,7 @@ standard.f(None) [file pyproject.toml] \[tool.mypy] strict_optional = false +implicit_optional = true \[[tool.mypy.overrides]] module = 'optional' strict_optional = true @@ -1134,13 +1152,13 @@ from typing import Any def f(s): yield s +def g(x) -> Any: + yield x # E: Expression has type "Any" + x = f(0) # E: Expression has type "Any" for x in f(0): # E: Expression has type "Any" g(x) # E: Expression has type "Any" -def g(x) -> Any: - yield x # E: Expression has type "Any" - l = [1, 2, 3] l[f(0)] # E: Expression has type "Any" f(l) @@ -1588,14 +1606,19 @@ strict_equality = false [case testNoImplicitReexport] -# flags: --no-implicit-reexport -from other_module_2 import a +# flags: --no-implicit-reexport --show-error-codes +from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" [attr-defined] +reveal_type(a) # N: Revealed type is "builtins.int" + +import other_module_2 +# TODO: this should also reveal builtins.int, see #13965 +reveal_type(other_module_2.a) # E: "object" does not explicitly export attribute "a" [attr-defined] \ + # N: Revealed type is "Any" + [file other_module_1.py] a = 5 [file other_module_2.py] from other_module_1 import a -[out] -main:2: error: Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled [case testNoImplicitReexportRespectsAll] # flags: --no-implicit-reexport @@ -1609,7 +1632,7 @@ from other_module_1 import a, b __all__ = ('b',) [builtins fixtures/tuple.pyi] [out] -main:2: error: Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testNoImplicitReexportStarConsideredExplicit] # flags: --no-implicit-reexport @@ -1625,7 +1648,7 @@ __all__ = ('b',) [case testNoImplicitReexportGetAttr] # flags: --no-implicit-reexport --python-version 3.7 -from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a"; implicit reexport disabled +from other_module_2 import a # E: Module "other_module_2" does not explicitly export attribute "a" [file other_module_1.py] from typing import Any def __getattr__(name: str) -> Any: ... 
@@ -1643,7 +1666,7 @@ attr_2 = 6 [file other_module_2.py] from other_module_1 import attr_1, attr_2 [out] -main:2: error: Module "other_module_2" does not explicitly export attribute "attr_1"; implicit reexport disabled +main:2: error: Module "other_module_2" does not explicitly export attribute "attr_1" [case testNoImplicitReexportMypyIni] # flags: --config-file tmp/mypy.ini @@ -1661,7 +1684,7 @@ implicit_reexport = True \[mypy-other_module_2] implicit_reexport = False [out] -main:2: error: Module "other_module_2" has no attribute "a" +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testNoImplicitReexportPyProjectTOML] @@ -1682,7 +1705,7 @@ module = 'other_module_2' implicit_reexport = false [out] -main:2: error: Module "other_module_2" has no attribute "a" +main:2: error: Module "other_module_2" does not explicitly export attribute "a" [case testImplicitAnyOKForNoArgs] @@ -2005,12 +2028,12 @@ x = 'should be fine' x.trim() [case testDisableDifferentErrorCode] -# flags: --disable-error-code name-defined --show-error-code +# flags: --disable-error-code name-defined --show-error-codes x = 'should not be fine' x.trim() # E: "str" has no attribute "trim" [attr-defined] [case testDisableMultipleErrorCode] -# flags: --disable-error-code attr-defined --disable-error-code return-value --show-error-code +# flags: --disable-error-code attr-defined --disable-error-code return-value --show-error-codes x = 'should be fine' x.trim() @@ -2020,12 +2043,12 @@ def bad_return_type() -> str: bad_return_type('no args taken!') # E: Too many arguments for "bad_return_type" [call-arg] [case testEnableErrorCode] -# flags: --disable-error-code attr-defined --enable-error-code attr-defined --show-error-code +# flags: --disable-error-code attr-defined --enable-error-code attr-defined --show-error-codes x = 'should be fine' x.trim() # E: "str" has no attribute "trim" [attr-defined] [case testEnableDifferentErrorCode] -# flags: --disable-error-code attr-defined --enable-error-code name-defined --show-error-code +# flags: --disable-error-code attr-defined --enable-error-code name-defined --show-error-codes x = 'should not be fine' x.trim() y.trim() # E: Name "y" is not defined [name-defined] @@ -2036,7 +2059,7 @@ y.trim() # E: Name "y" is not defined [name-defined] --disable-error-code return-value \ --disable-error-code call-arg \ --enable-error-code attr-defined \ - --enable-error-code return-value --show-error-code + --enable-error-code return-value --show-error-codes x = 'should be fine' x.trim() # E: "str" has no attribute "trim" [attr-defined] @@ -2053,3 +2076,101 @@ def f(x): y = 1 f(reveal_type(y)) # E: Call to untyped function "f" in typed context \ # N: Revealed type is "builtins.int" + +[case testPerModuleErrorCodes] +# flags: --config-file tmp/mypy.ini +import tests.foo +import bar +[file bar.py] +x = [] # E: Need type annotation for "x" (hint: "x: List[] = ...") +[file tests/__init__.py] +[file tests/foo.py] +x = [] # OK +[file mypy.ini] +\[mypy] +strict = True + +\[mypy-tests.*] +allow_untyped_defs = True +allow_untyped_calls = True +disable_error_code = var-annotated + +[case testPerModuleErrorCodesOverride] +# flags: --config-file tmp/mypy.ini +import tests.foo +import bar +[file bar.py] +def foo() -> int: ... +if foo: ... 
# E: Function "Callable[[], int]" could always be true in boolean context +42 + "no" # type: ignore # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead) +[file tests/__init__.py] +[file tests/foo.py] +def foo() -> int: ... +if foo: ... # E: Function "Callable[[], int]" could always be true in boolean context +42 + "no" # type: ignore +[file mypy.ini] +\[mypy] +enable_error_code = ignore-without-code, truthy-bool, used-before-def + +\[mypy-tests.*] +disable_error_code = ignore-without-code + +[case testShowErrorCodes] +# flags: --show-error-codes +x: int = "" # E: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] + +[case testHideErrorCodes] +# flags: --hide-error-codes +x: int = "" # E: Incompatible types in assignment (expression has type "str", variable has type "int") + +[case testTypeVarTupleDisabled_no_incomplete] +from typing_extensions import TypeVarTuple +Ts = TypeVarTuple("Ts") # E: "TypeVarTuple" support is experimental, use --enable-incomplete-feature=TypeVarTuple to enable +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleEnabled_no_incomplete] +# flags: --enable-incomplete-feature=TypeVarTuple +from typing_extensions import TypeVarTuple +Ts = TypeVarTuple("Ts") # OK +[builtins fixtures/tuple.pyi] + + +[case testDisableBytearrayPromotion] +# flags: --disable-bytearray-promotion +def f(x: bytes) -> None: ... +f(bytearray(b"asdf")) # E: Argument 1 to "f" has incompatible type "bytearray"; expected "bytes" +f(memoryview(b"asdf")) +[builtins fixtures/primitives.pyi] + +[case testDisableMemoryviewPromotion] +# flags: --disable-memoryview-promotion +def f(x: bytes) -> None: ... +f(bytearray(b"asdf")) +f(memoryview(b"asdf")) # E: Argument 1 to "f" has incompatible type "memoryview"; expected "bytes" +[builtins fixtures/primitives.pyi] + +[case testDisableBytearrayMemoryviewPromotionStrictEquality] +# flags: --disable-bytearray-promotion --disable-memoryview-promotion --strict-equality +def f(x: bytes, y: bytearray, z: memoryview) -> None: + x == y + y == z + x == z + 97 in x + 97 in y + 97 in z + x in y + x in z +[builtins fixtures/primitives.pyi] + +[case testEnableBytearrayMemoryviewPromotionStrictEquality] +# flags: --strict-equality +def f(x: bytes, y: bytearray, z: memoryview) -> None: + x == y + y == z + x == z + 97 in x + 97 in y + 97 in z + x in y + x in z +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-functions.test b/test-data/unit/check-functions.test index 32d531ebbe99..c23bbb77f643 100644 --- a/test-data/unit/check-functions.test +++ b/test-data/unit/check-functions.test @@ -337,6 +337,11 @@ class A: pass [out] [case testCompatibilityOfSimpleTypeObjectWithStdType] +class A: + def __init__(self, a: 'A') -> None: pass + +def f() -> None: pass + t = None # type: type a = None # type: A @@ -347,11 +352,6 @@ if int(): if int(): t = A -class A: - def __init__(self, a: 'A') -> None: pass - -def f() -> None: pass - [case testFunctionTypesWithOverloads] from foo import * [file foo.pyi] @@ -465,6 +465,12 @@ if int(): [case testCallingFunctionsWithDefaultArgumentValues] +# flags: --implicit-optional --no-strict-optional +class A: pass +class AA(A): pass +class B: pass + +def f(x: 'A' = None) -> 'B': pass a, b = None, None # type: (A, B) if int(): @@ -481,11 +487,6 @@ if int(): if int(): b = f(AA()) -def f(x: 'A' = None) -> 'B': pass - -class A: pass -class AA(A): pass -class B: pass [builtins fixtures/tuple.pyi] [case testDefaultArgumentExpressions] @@ -1132,6 +1133,7 
@@ def dec(f: T) -> T: [out] [case testForwardReferenceToFunctionWithMultipleDecorators] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -1166,6 +1168,7 @@ def dec(f): return f [builtins fixtures/staticmethod.pyi] [case testForwardRefereceToDecoratedFunctionWithCallExpressionDecorator] +# flags: --disable-error-code=used-before-def def f(self) -> None: g() g(1) @@ -1393,7 +1396,11 @@ x = None # type: Any if x: def f(x: int) -> None: pass else: - def f(x): pass # E: All conditional function variants must have identical signatures + def f(x): pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: int) -> None \ + # N: Redefinition: \ + # N: def f(x: Any) -> Any [case testIncompatibleConditionalFunctionDefinition2] from typing import Any @@ -1401,7 +1408,11 @@ x = None # type: Any if x: def f(x: int) -> None: pass else: - def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures + def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: int) -> None \ + # N: Redefinition: \ + # N: def f(y: int) -> None [case testIncompatibleConditionalFunctionDefinition3] from typing import Any @@ -1409,7 +1420,11 @@ x = None # type: Any if x: def f(x: int) -> None: pass else: - def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures + def f(x: int = 0) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: int) -> None \ + # N: Redefinition: \ + # N: def f(x: int = ...) -> None [case testConditionalFunctionDefinitionUsingDecorator1] from typing import Callable @@ -1462,19 +1477,41 @@ else: @dec def f(): pass +[case testConditionalFunctionDefinitionUnreachable] +def bar() -> None: + if False: + foo = 1 + else: + def foo(obj): ... + +def baz() -> None: + if False: + foo: int = 1 + else: + def foo(obj): ... # E: Incompatible redefinition (redefinition with type "Callable[[Any], Any]", original type "int") +[builtins fixtures/tuple.pyi] + [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition1] from typing import Any def f(x: str) -> None: pass x = None # type: Any if x: - def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures + def f(x: int) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: str) -> None \ + # N: Redefinition: \ + # N: def f(x: int) -> None [case testConditionalRedefinitionOfAnUnconditionalFunctionDefinition2] from typing import Any def f(x: int) -> None: pass # N: "f" defined here x = None # type: Any if x: - def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures + def f(y: int) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: int) -> None \ + # N: Redefinition: \ + # N: def f(y: int) -> None f(x=1) # The first definition takes precedence. 
f(y=1) # E: Unexpected keyword argument "y" for "f" @@ -1640,7 +1677,11 @@ class A: if x: def f(self, x: int) -> None: pass else: - def f(self, x): pass # E: All conditional function variants must have identical signatures + def f(self, x): pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(self: A, x: int) -> None \ + # N: Redefinition: \ + # N: def f(self: A, x: Any) -> Any [out] [case testConditionalFunctionDefinitionInTry] @@ -2569,9 +2610,9 @@ import p def f() -> int: ... [case testLambdaDefaultTypeErrors] -lambda a=nonsense: a # E: Name "nonsense" is not defined lambda a=(1 + 'asdf'): a # E: Unsupported operand types for + ("int" and "str") -def f(x: int = i): # E: Name "i" is not defined +lambda a=nonsense: a # E: Name "nonsense" is not defined +def f(x: int = i): # E: Name "i" is not defined # E: Name "i" is used before definition i = 42 [case testRevealTypeOfCallExpressionReturningNoneWorks] @@ -2669,8 +2710,18 @@ class A: @dec def e(self) -> int: pass @property - def g() -> int: pass # E: Method must have at least one argument + def g() -> int: pass # E: Method must have at least one argument. Did you forget the "self" argument? @property def h(self, *args, **kwargs) -> int: pass # OK [builtins fixtures/property.pyi] [out] + +[case testSubtypingUnionGenericBounds] +from typing import Callable, TypeVar, Union, Sequence + +TI = TypeVar("TI", bound=int) +TS = TypeVar("TS", bound=str) + +f: Callable[[Sequence[TI]], None] +g: Callable[[Union[Sequence[TI], Sequence[TS]]], None] +f = g diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test index bd1f487bc895..1f06bc7c540a 100644 --- a/test-data/unit/check-generic-subtyping.test +++ b/test-data/unit/check-generic-subtyping.test @@ -1033,3 +1033,21 @@ x2: X2[str, int] reveal_type(iter(x2)) # N: Revealed type is "typing.Iterator[builtins.int]" reveal_type([*x2]) # N: Revealed type is "builtins.list[builtins.int]" [builtins fixtures/dict.pyi] + +[case testIncompatibleVariance] +from typing import TypeVar, Generic +T = TypeVar('T') +T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) + +class A(Generic[T_co]): ... +class B(A[T_contra], Generic[T_contra]): ... # E: Variance of TypeVar "T_contra" incompatible with variance in parent type + +class C(Generic[T_contra]): ... +class D(C[T_co], Generic[T_co]): ... # E: Variance of TypeVar "T_co" incompatible with variance in parent type + +class E(Generic[T]): ... +class F(E[T_co], Generic[T_co]): ... # E: Variance of TypeVar "T_co" incompatible with variance in parent type + +class G(Generic[T]): ... +class H(G[T_contra], Generic[T_contra]): ... 
# E: Variance of TypeVar "T_contra" incompatible with variance in parent type diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index b8d70d1dae96..a62028ca94ea 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -20,21 +20,19 @@ class C: pass [case testGenericMethodArgument] from typing import TypeVar, Generic T = TypeVar('T') -a.f(c) # Fail -a.f(b) + +class A(Generic[T]): + def f(self, a: T) -> None: pass a = None # type: A[B] b = None # type: B c = None # type: C -class A(Generic[T]): - def f(self, a: T) -> None: pass +a.f(c) # E: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" +a.f(b) class B: pass class C: pass -[out] -main:3: error: Argument 1 to "f" of "A" has incompatible type "C"; expected "B" - [case testGenericMemberVariable] from typing import TypeVar, Generic T = TypeVar('T') @@ -655,16 +653,16 @@ a: other.Array[float] reveal_type(a) # N: Revealed type is "other.array[Any, other.dtype[builtins.float]]" [out] -main:3: error: Type argument "float" of "dtype" must be a subtype of "generic" [type-var] +main:3: error: Type argument "float" of "Array" must be a subtype of "generic" [type-var] a: other.Array[float] ^ [file other.py] from typing import Any, Generic, TypeVar -DT = TypeVar("DT", covariant=True, bound=dtype[Any]) -DTS = TypeVar("DTS", covariant=True, bound=generic) +DT = TypeVar("DT", covariant=True, bound='dtype[Any]') +DTS = TypeVar("DTS", covariant=True, bound='generic') S = TypeVar("S", bound=Any) -ST = TypeVar("ST", bound=generic, covariant=True) +ST = TypeVar("ST", bound='generic', covariant=True) class common: pass class generic(common): pass @@ -1031,8 +1029,9 @@ IntNode[int](1, 1) IntNode[int](1, 'a') # E: Argument 2 to "Node" has incompatible type "str"; expected "int" SameNode = Node[T, T] -# TODO: fix https://github.com/python/mypy/issues/7084. 
-ff = SameNode[T](1, 1) +ff = SameNode[T](1, 1) # E: Type variable "__main__.T" is unbound \ + # N: (Hint: Use "Generic[T]" or "Protocol[T]" base class to bind "T" inside a class) \ + # N: (Hint: Use "T" in function signature to bind "T" inside a function) a = SameNode(1, 'x') reveal_type(a) # N: Revealed type is "__main__.Node[Any, Any]" b = SameNode[int](1, 1) @@ -1049,20 +1048,20 @@ CA = Callable[[T], int] TA = Tuple[T, int] UA = Union[T, int] -cs = CA + 1 # E: Unsupported left operand type for + ("object") +cs = CA + 1 # E: Unsupported left operand type for + ("") reveal_type(cs) # N: Revealed type is "Any" -ts = TA() # E: "object" not callable +ts = TA() # E: "" not callable reveal_type(ts) # N: Revealed type is "Any" -us = UA.x # E: "object" has no attribute "x" +us = UA.x # E: "" has no attribute "x" reveal_type(us) # N: Revealed type is "Any" xx = CA[str] + 1 # E: Type application is only supported for generic classes yy = TA[str]() # E: Type application is only supported for generic classes zz = UA[str].x # E: Type application is only supported for generic classes [builtins fixtures/tuple.pyi] - +[typing fixtures/typing-medium.pyi] [out] [case testGenericTypeAliasesTypeVarBinding] @@ -1101,13 +1100,12 @@ BadA = A[str, T] # One error here SameA = A[T, T] x = None # type: SameA[int] -y = None # type: SameA[str] # Two errors here, for both args of A +y = None # type: SameA[str] # Another error here [builtins fixtures/list.pyi] [out] main:9:8: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "T" of "A" cannot be "str" -main:13:1: error: Value of type variable "S" of "A" cannot be "str" +main:13:1: error: Value of type variable "T" of "SameA" cannot be "str" [case testGenericTypeAliasesIgnoredPotentialAlias] class A: ... 
@@ -1333,6 +1331,7 @@ class type: pass class tuple: pass class function: pass class str: pass +class dict: pass [case testMultipleAssignmentWithIterable] from typing import Iterable, TypeVar @@ -1383,10 +1382,11 @@ Z = TypeVar('Z') class OO: pass a = None # type: A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object] -f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" - def f(a: OO) -> None: pass + +f(a) # E: Argument 1 to "f" has incompatible type "A[object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object, object]"; expected "OO" + class A(Generic[B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z]): pass [case testErrorWithShorterGenericTypeName] @@ -1394,9 +1394,10 @@ from typing import TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, B] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, B]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -1405,9 +1406,10 @@ from typing import Callable, TypeVar, Generic S = TypeVar('S') T = TypeVar('T') a = None # type: A[object, Callable[[], None]] +def f(a: 'B') -> None: pass + f(a) # E: Argument 1 to "f" has incompatible type "A[object, Callable[[], None]]"; expected "B" -def f(a: 'B') -> None: pass class A(Generic[S, T]): pass class B: pass @@ -1544,7 +1546,7 @@ class C(Generic[T]): reveal_type(C.F(17).foo()) # N: Revealed type is "builtins.int" reveal_type(C("").F(17).foo()) # N: Revealed type is "builtins.int" reveal_type(C.F) # N: Revealed type is "def [K] (k: K`1) -> __main__.C.F[K`1]" -reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`1) -> __main__.C.F[K`1]" +reveal_type(C("").F) # N: Revealed type is "def [K] (k: K`6) -> __main__.C.F[K`6]" -- Callable subtyping with generic functions @@ -1557,9 +1559,9 @@ A = TypeVar('A') B = TypeVar('B') def f1(x: A) -> A: ... -def f2(x: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def f2(x: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar def f3(x: B) -> B: ... -def f4(x: int) -> A: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def f4(x: int) -> A: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar y1 = f1 if int(): @@ -1608,8 +1610,8 @@ B = TypeVar('B') T = TypeVar('T') def outer(t: T) -> None: def f1(x: A) -> A: ... - def f2(x: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar - def f3(x: T) -> A: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar + def f2(x: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar + def f3(x: T) -> A: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar def f4(x: A) -> T: ... 
def f5(x: T) -> T: ... @@ -1778,7 +1780,7 @@ from typing import TypeVar A = TypeVar('A') B = TypeVar('B') def f1(x: int, y: A) -> A: ... -def f2(x: int, y: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def f2(x: int, y: A) -> B: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar def f3(x: A, y: B) -> B: ... g = f1 g = f2 @@ -2143,7 +2145,7 @@ from typing import Generic, TypeVar, Any, Tuple, Type T = TypeVar('T') S = TypeVar('S') -Q = TypeVar('Q', bound=A[Any]) +Q = TypeVar('Q', bound='A[Any]') class A(Generic[T]): @classmethod @@ -2580,3 +2582,119 @@ class Bar(Foo[AnyStr]): [out] main:10: error: Argument 1 to "method1" of "Foo" has incompatible type "str"; expected "AnyStr" main:10: error: Argument 2 to "method1" of "Foo" has incompatible type "bytes"; expected "AnyStr" + +[case testTypeVariableClashVar] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], R] + +def func(x: C[R]) -> R: + return x.x(42) # OK + +[case testTypeVariableClashVarTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + x: Callable[[T], Tuple[R, T]] + +def func(x: C[R]) -> R: + if bool(): + return x.x(42)[0] # OK + else: + return x.x(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashMethod] +from typing import Generic, TypeVar, Callable + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], R]: ... + +def func(x: C[R]) -> R: + return x.x()(42) # OK + +[case testTypeVariableClashMethodTuple] +from typing import Generic, TypeVar, Callable, Tuple + +T = TypeVar("T") +R = TypeVar("R") +class C(Generic[R]): + def x(self) -> Callable[[T], Tuple[R, T]]: ... 
+ +def func(x: C[R]) -> R: + if bool(): + return x.x()(42)[0] # OK + else: + return x.x()(42)[1] # E: Incompatible return value type (got "int", expected "R") +[builtins fixtures/tuple.pyi] + +[case testTypeVariableClashVarSelf] +from typing import Self, TypeVar, Generic, Callable + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + x: Callable[[S], Self] + y: T + +def foo(x: C[T]) -> T: + return x.x(42).y # OK + +[case testNestedGenericFunctionTypeApplication] +from typing import TypeVar, Generic, List + +A = TypeVar("A") +B = TypeVar("B") + +class C(Generic[A]): + x: A + +def foo(x: A) -> A: + def bar() -> List[A]: + y = C[List[A]]() + z = C[List[B]]() # E: Type variable "__main__.B" is unbound \ + # N: (Hint: Use "Generic[B]" or "Protocol[B]" base class to bind "B" inside a class) \ + # N: (Hint: Use "B" in function signature to bind "B" inside a function) + return y.x + return bar()[0] + + +-- TypeVar imported from typing_extensions +-- --------------------------------------- + +[case testTypeVarTypingExtensionsSimpleGeneric] +from typing import Generic +from typing_extensions import TypeVar + +T = TypeVar("T") + +class A(Generic[T]): + def __init__(self, value: T) -> None: + self.value = value + +a: A = A(8) +b: A[str] = A("") + +reveal_type(A(1.23)) # N: Revealed type is "__main__.A[builtins.float]" + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTypingExtensionsSimpleBound] +from typing_extensions import TypeVar + +T= TypeVar("T") + +def func(var: T) -> T: + return var + +reveal_type(func(1)) # N: Revealed type is "builtins.int" + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-incomplete-fixture.test b/test-data/unit/check-incomplete-fixture.test index f06dad293184..146494df1bd6 100644 --- a/test-data/unit/check-incomplete-fixture.test +++ b/test-data/unit/check-incomplete-fixture.test @@ -12,14 +12,6 @@ import m m.x # E: "object" has no attribute "x" [file m.py] -[case testDictMissingFromStubs] -from typing import Dict -def f(x: Dict[int]) -> None: pass -[out] -main:1: error: Module "typing" has no attribute "Dict" -main:1: note: Maybe your test fixture does not define "builtins.dict"? 
-main:1: note: Consider adding [builtins fixtures/dict.pyi] to your test description - [case testSetMissingFromStubs] from typing import Set def f(x: Set[int]) -> None: pass diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index 44452e2072b3..ec0c5d5e4805 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -1025,10 +1025,7 @@ import a.b [file a/b.py] -[rechecked b] -[stale] -[out2] -tmp/b.py:4: error: Name "a" already defined on line 3 +[stale b] [case testIncrementalSilentImportsAndImportsInClass] # flags: --ignore-missing-imports @@ -2510,7 +2507,7 @@ A = Tuple[int] [case testNewTypeFromForwardNamedTupleIncremental] from typing import NewType, NamedTuple, Tuple -NT = NewType('NT', N) +NT = NewType('NT', 'N') class N(NamedTuple): x: int @@ -2594,8 +2591,8 @@ class C(NamedTuple): # type: ignore from typing import TypeVar, Generic T = TypeVar('T') S = TypeVar('S') -IntNode = Node[int, S] -AnyNode = Node[S, T] +IntNode = Node[int, S] # type: ignore[used-before-def] +AnyNode = Node[S, T] # type: ignore[used-before-def] class Node(Generic[T, S]): def __init__(self, x: T, y: S) -> None: @@ -2645,8 +2642,8 @@ class G(Generic[T]): x: T yg: G[M] -z: int = G[M]().x.x -z = G[M]().x[0] +z: int = G[M]().x.x # type: ignore[used-before-def] +z = G[M]().x[0] # type: ignore[used-before-def] M = NamedTuple('M', [('x', int)]) [builtins fixtures/tuple.pyi] [out] @@ -3544,11 +3541,11 @@ class Bar(Baz): pass [file c.py] class Baz: - def __init__(self): + def __init__(self) -> None: self.x = 12 # type: int [file c.py.2] class Baz: - def __init__(self): + def __init__(self) -> None: self.x = 'lol' # type: str [out] [out2] @@ -4600,7 +4597,7 @@ def outer() -> None: [out2] [case testRecursiveAliasImported] - +# flags: --disable-recursive-aliases import a [file a.py] @@ -5730,6 +5727,7 @@ class C: tmp/a.py:2: error: "object" has no attribute "xyz" [case testIncrementalInvalidNamedTupleInUnannotatedFunction] +# flags: --disable-error-code=annotation-unchecked import a [file a.py] @@ -5759,7 +5757,7 @@ class C: [builtins fixtures/tuple.pyi] [case testNamedTupleUpdateNonRecursiveToRecursiveCoarse] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -5802,7 +5800,7 @@ tmp/c.py:5: error: Incompatible types in assignment (expression has type "Option tmp/c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]" [case testTupleTypeUpdateNonRecursiveToRecursiveCoarse] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -5835,7 +5833,7 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypeAliasUpdateNonRecursiveToRecursiveCoarse] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -5868,7 +5866,7 @@ tmp/c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypedDictUpdateNonRecursiveToRecursiveCoarse] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -5911,6 +5909,38 @@ tmp/c.py:4: note: Revealed type is "TypedDict('a.N', {'r': Union[TypedDict('b.M' 
tmp/c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") tmp/c.py:7: note: Revealed type is "TypedDict('a.N', {'r': Union[TypedDict('b.M', {'r': Union[..., None], 'x': builtins.int}), None], 'x': builtins.int})" +[case testIncrementalAddClassMethodPlugin] +# flags: --config-file tmp/mypy.ini +import b + +[file mypy.ini] +\[mypy] +plugins=/test-data/unit/plugins/add_classmethod.py + +[file a.py] +class BaseAddMethod: pass + +class MyClass(BaseAddMethod): + pass + +[file b.py] +import a + +[file b.py.2] +import a + +my_class = a.MyClass() +reveal_type(a.MyClass.foo_classmethod) +reveal_type(a.MyClass.foo_staticmethod) +reveal_type(my_class.foo_classmethod) +reveal_type(my_class.foo_staticmethod) + +[rechecked b] +[out2] +tmp/b.py:4: note: Revealed type is "def ()" +tmp/b.py:5: note: Revealed type is "def (builtins.int) -> builtins.str" +tmp/b.py:6: note: Revealed type is "def ()" +tmp/b.py:7: note: Revealed type is "def (builtins.int) -> builtins.str" [case testGenericNamedTupleSerialization] import b [file a.py] @@ -5957,3 +5987,419 @@ s: str = td["value"] [out] [out2] tmp/b.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") + +[case testUnpackKwargsSerialize] +import m +[file lib.py] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +def foo(**kwargs: Unpack[Person]): + ... + +[file m.py] +from lib import foo +foo(name='Jennifer', age=38) +[file m.py.2] +from lib import foo +foo(name='Jennifer', age="38") +[builtins fixtures/dict.pyi] +[out] +[out2] +tmp/m.py:2: error: Argument "age" to "foo" has incompatible type "str"; expected "int" + +[case testDisableEnableErrorCodesIncremental] +# flags: --disable-error-code truthy-bool +# flags2: --enable-error-code truthy-bool +class Foo: + pass + +foo = Foo() +if foo: + ... +[out] +[out2] +main:7: error: "__main__.foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context + +[case testModuleAsProtocolImplementationSerialize] +import m +[file m.py] +from typing import Protocol +from lib import C + +class Options(Protocol): + timeout: int + def update(self) -> bool: ... + +def setup(options: Options) -> None: ... +setup(C().config) + +[file lib.py] +import default_config + +class C: + config = default_config + +[file default_config.py] +timeout = 100 +def update() -> bool: ... + +[file default_config.py.2] +timeout = 100 +def update() -> str: ... +[builtins fixtures/module.pyi] +[out] +[out2] +tmp/m.py:9: error: Argument 1 to "setup" has incompatible type Module; expected "Options" +tmp/m.py:9: note: Following member(s) of Module "default_config" have conflicts: +tmp/m.py:9: note: Expected: +tmp/m.py:9: note: def update() -> bool +tmp/m.py:9: note: Got: +tmp/m.py:9: note: def update() -> str + +[case testAbstractBodyTurnsEmptyCoarse] +# flags: --strict-optional +from b import Base + +class Sub(Base): + def meth(self) -> int: + return super().meth() + +[file b.py] +from abc import abstractmethod +class Base: + @abstractmethod + def meth(self) -> int: return 0 + +[file b.py.2] +from abc import abstractmethod +class Base: + @abstractmethod + def meth(self) -> int: ... 
+[out] +[out2] +main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe + +[case testNoCrashDoubleReexportFunctionEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +import c +def foo(arg: c.C) -> None: pass + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C: ... +[file pb2.py.2] +class C: ... +[file pb1.py.2] +[out] +[out2] +[out3] + +[case testNoCrashDoubleReexportBaseEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +import c +class D(c.C): pass + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C: ... +[file pb2.py.2] +class C: ... +[file pb1.py.2] +[out] +[out2] +[out3] + +[case testNoCrashDoubleReexportMetaEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +import c +class D(metaclass=c.C): pass + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C(type): ... +[file pb2.py.2] +class C(type): ... +[file pb1.py.2] +[out] +[out2] +[out3] + +[case testNoCrashDoubleReexportTypedDictEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +from typing_extensions import TypedDict +import c +class D(TypedDict): + x: c.C + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C: ... +[file pb2.py.2] +class C: ... +[file pb1.py.2] +[builtins fixtures/dict.pyi] +[out] +[out2] +[out3] + +[case testNoCrashDoubleReexportTupleEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +from typing import Tuple +import c +class D(Tuple[c.C, int]): pass + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C: ... +[file pb2.py.2] +class C: ... +[file pb1.py.2] +[builtins fixtures/tuple.pyi] +[out] +[out2] +[out3] + +[case testNoCrashDoubleReexportOverloadEmpty] +import m + +[file m.py] +import f +[file m.py.3] +import f +# modify + +[file f.py] +from typing import Any, overload +import c + +@overload +def foo(arg: int) -> None: ... +@overload +def foo(arg: c.C) -> None: ... +def foo(arg: Any) -> None: + pass + +[file c.py] +from types import C + +[file types.py] +import pb1 +C = pb1.C +[file types.py.2] +import pb1, pb2 +C = pb2.C + +[file pb1.py] +class C: ... +[file pb2.py.2] +class C: ... +[file pb1.py.2] +[out] +[out2] +[out3] + +[case testNoCrashOnPartialLambdaInference] +import m +[file m.py] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +[file m.py.2] +from typing import TypeVar, Callable + +V = TypeVar("V") +def apply(val: V, func: Callable[[V], None]) -> None: + return func(val) + +xs = [] +apply(0, lambda a: xs.append(a)) +reveal_type(xs) +[builtins fixtures/list.pyi] +[out] +[out2] +tmp/m.py:9: note: Revealed type is "builtins.list[builtins.int]" + +[case testTypingSelfCoarse] +import m +[file lib.py] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file m.py] +import lib +class D: ... 
+[file m.py.2] +import lib +class D(lib.C): ... + +reveal_type(D.meth) +reveal_type(D().meth) +[out] +[out2] +tmp/m.py:4: note: Revealed type is "def [Self <: lib.C] (self: Self`0, other: Self`0) -> Self`0" +tmp/m.py:5: note: Revealed type is "def (other: m.D) -> m.D" + +[case testIncrementalNestedGenericCallableCrash] +from typing import TypeVar, Callable + +T = TypeVar("T") + +class B: + def foo(self) -> Callable[[T], T]: ... + +class C(B): + def __init__(self) -> None: + self.x = self.foo() +[out] +[out2] + +[case testNoCrashIncrementalMetaAny] +import a +[file a.py] +from m import Foo +[file a.py.2] +from m import Foo +# touch +[file m.py] +from missing_module import Meta # type: ignore[import] +class Foo(metaclass=Meta): ... + +[case testIncrementalNativeInt] +import a +[file a.py] +from mypy_extensions import i64 +x: i64 = 0 +[file a.py.2] +from mypy_extensions import i64 +x: i64 = 0 +y: int = x +[builtins fixtures/tuple.pyi] +[out] +[out2] + +[case testGenericTypedDictWithError] +import b +[file a.py] +from typing import Generic, TypeVar +from typing_extensions import TypedDict + +TValue = TypeVar("TValue") +class Dict(TypedDict, Generic[TValue]): + value: TValue + +[file b.py] +from a import Dict, TValue + +def f(d: Dict[TValue]) -> TValue: + return d["value"] +def g(d: Dict[TValue]) -> TValue: + return d["x"] + +[file b.py.2] +from a import Dict, TValue + +def f(d: Dict[TValue]) -> TValue: + return d["value"] +def g(d: Dict[TValue]) -> TValue: + return d["y"] +[builtins fixtures/dict.pyi] +[out] +tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "x" +[out2] +tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "y" diff --git a/test-data/unit/check-inference-context.test b/test-data/unit/check-inference-context.test index 3bab79f5aec2..625ab091a6a9 100644 --- a/test-data/unit/check-inference-context.test +++ b/test-data/unit/check-inference-context.test @@ -7,6 +7,12 @@ [case testBasicContextInference] from typing import TypeVar, Generic T = TypeVar('T') + +def f() -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -17,15 +23,11 @@ if int(): ab = f() if int(): b = f() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B") - -def f() -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - [case testBasicContextInferenceForConstructor] from typing import TypeVar, Generic T = TypeVar('T') +class A(Generic[T]): pass +class B: pass ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -36,13 +38,16 @@ if int(): ab = A() if int(): b = A() # E: Incompatible types in assignment (expression has type "A[<nothing>]", variable has type "B") - -class A(Generic[T]): pass -class B: pass - [case testIncompatibleContextInference] from typing import TypeVar, Generic T = TypeVar('T') +def f(a: T) -> 'A[T]': + pass + +class A(Generic[T]): pass + +class B: pass +class C: pass b = None # type: B c = None # type: C ab = None # type: A[B] @@ -63,14 +68,6 @@ if int(): if int(): ac = f(c) -def f(a: T) -> 'A[T]': - pass - -class A(Generic[T]): pass - -class B: pass -class C: pass - -- Local variables -- --------------- @@ -159,6 +156,12 @@ class B: pass [case testInferenceWithTypeVariableTwiceInReturnType] from typing import TypeVar, Tuple, Generic T = TypeVar('T') + +def f(a: T) -> 'Tuple[A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -175,17 +178,20 @@ if int(): ab, ab = f(b) if
int(): ao, ao = f(o) - -def f(a: T) -> 'Tuple[A[T], A[T]]': pass - -class A(Generic[T]): pass -class B: pass [builtins fixtures/tuple.pyi] [case testInferenceWithTypeVariableTwiceInReturnTypeAndMultipleVariables] from typing import TypeVar, Tuple, Generic S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass +def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass +def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass + +class A(Generic[T]): pass +class B: pass + b = None # type: B o = None # type: object ab = None # type: A[B] @@ -206,13 +212,6 @@ if int(): ab, ab, ao = g(b, b) if int(): ab, ab, ab, ab = h(b, b) - -def f(a: S, b: T) -> 'Tuple[A[S], A[T], A[T]]': pass -def g(a: S, b: T) -> 'Tuple[A[S], A[S], A[T]]': pass -def h(a: S, b: T) -> 'Tuple[A[S], A[S], A[T], A[T]]': pass - -class A(Generic[T]): pass -class B: pass [builtins fixtures/tuple.pyi] @@ -223,6 +222,13 @@ class B: pass [case testMultipleTvatInstancesInArgs] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T, b: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass +class C(B): pass + ac = None # type: A[C] ab = None # type: A[B] ao = None # type: A[object] @@ -246,12 +252,6 @@ if int(): if int(): ab = f(c, b) -def f(a: T, b: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass -class C(B): pass - -- Nested generic function calls -- ----------------------------- @@ -260,6 +260,12 @@ class C(B): pass [case testNestedGenericFunctionCall1] from typing import TypeVar, Generic T = TypeVar('T') + +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): pass +class B: pass + aab = None # type: A[A[B]] aao = None # type: A[A[object]] ao = None # type: A[object] @@ -273,15 +279,16 @@ if int(): aab = f(f(b)) aao = f(f(b)) ao = f(f(b)) +[case testNestedGenericFunctionCall2] +from typing import TypeVar, Generic +T = TypeVar('T') -def f(a: T) -> 'A[T]': pass +def f(a: T) -> T: pass +def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass -[case testNestedGenericFunctionCall2] -from typing import TypeVar, Generic -T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -293,17 +300,16 @@ if int(): if int(): ab = f(g(b)) ao = f(g(b)) - -def f(a: T) -> T: pass +[case testNestedGenericFunctionCall3] +from typing import TypeVar, Generic +T = TypeVar('T') +def f(a: T, b: T) -> T: + pass def g(a: T) -> 'A[T]': pass class A(Generic[T]): pass class B: pass - -[case testNestedGenericFunctionCall3] -from typing import TypeVar, Generic -T = TypeVar('T') ab = None # type: A[B] ao = None # type: A[object] b = None # type: B @@ -320,14 +326,6 @@ if int(): if int(): ao = f(g(o), g(b)) -def f(a: T, b: T) -> T: - pass - -def g(a: T) -> 'A[T]': pass - -class A(Generic[T]): pass -class B: pass - -- Method calls -- ------------ @@ -339,6 +337,13 @@ T = TypeVar('T') o = None # type: object b = None # type: B c = None # type: C +def f(a: T) -> 'A[T]': pass + +class A(Generic[T]): + def g(self, a: 'A[T]') -> 'A[T]': pass + +class B: pass +class C(B): pass ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] @@ -353,14 +358,6 @@ if int(): ab = f(b).g(f(c)) ab.g(f(c)) -def f(a: T) -> 'A[T]': pass - -class A(Generic[T]): - def g(self, a: 'A[T]') -> 'A[T]': pass - -class B: pass -class C(B): pass - -- List expressions -- ---------------- @@ -461,8 +458,8 @@ class B: pass [case testParenthesesAndContext] from typing import List -l = ([A()]) # type: List[object] class A: pass +l = ([A()]) # type: List[object] [builtins 
fixtures/list.pyi] [case testComplexTypeInferenceWithTuple] @@ -470,14 +467,15 @@ from typing import TypeVar, Tuple, Generic k = TypeVar('k') t = TypeVar('t') v = TypeVar('v') -def f(x: Tuple[k]) -> 'A[k]': pass - -d = f((A(),)) # type: A[A[B]] class A(Generic[t]): pass class B: pass class C: pass class D(Generic[k, v]): pass + +def f(x: Tuple[k]) -> 'A[k]': pass + +d = f((A(),)) # type: A[A[B]] [builtins fixtures/list.pyi] @@ -505,12 +503,12 @@ d = {A() : a_c, [case testInitializationWithInferredGenericType] from typing import TypeVar, Generic T = TypeVar('T') -c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" def f(x: T) -> T: pass class C(Generic[T]): pass class A: pass +c = f(A()) # type: C[A] # E: Argument 1 to "f" has incompatible type "A"; expected "C[A]" [case testInferredGenericTypeAsReturnValue] from typing import TypeVar, Generic T = TypeVar('T') @@ -544,9 +542,6 @@ class B: pass from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -x = A() # type: I[int] -a_object = A() # type: A[object] -y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") class I(Generic[t]): @abstractmethod @@ -554,16 +549,20 @@ class I(Generic[t]): class A(I[t], Generic[t]): def f(self): pass +x = A() # type: I[int] +a_object = A() # type: A[object] +y = a_object # type: I[int] # E: Incompatible types in assignment (expression has type "A[object]", variable has type "I[int]") + [case testInferenceWithAbstractClassContext2] from typing import TypeVar, Generic from abc import abstractmethod, ABCMeta t = TypeVar('t') -a = f(A()) # type: A[int] -a_int = A() # type: A[int] -aa = f(a_int) class I(Generic[t]): pass class A(I[t], Generic[t]): pass def f(i: I[t]) -> A[t]: pass +a = f(A()) # type: A[int] +a_int = A() # type: A[int] +aa = f(a_int) [case testInferenceWithAbstractClassContext3] from typing import TypeVar, Generic, Iterable @@ -585,9 +584,9 @@ if int(): from typing import Any, TypeVar, Generic s = TypeVar('s') t = TypeVar('t') +class C(Generic[s, t]): pass x = [] # type: Any y = C() # type: Any -class C(Generic[s, t]): pass [builtins fixtures/list.pyi] @@ -737,6 +736,9 @@ a = m # type: List[A] # E: Incompatible types in assignment (expression has type [case testOrOperationInferredFromContext] from typing import List +class A: pass +class B: pass +class C(B): pass a, b, c = None, None, None # type: (List[A], List[B], List[C]) if int(): a = a or [] @@ -748,10 +750,6 @@ if int(): a = a or b # E: Incompatible types in assignment (expression has type "Union[List[A], List[B]]", variable has type "List[A]") if int(): b = b or c # E: Incompatible types in assignment (expression has type "Union[List[B], List[C]]", variable has type "List[B]") - -class A: pass -class B: pass -class C(B): pass [builtins fixtures/list.pyi] @@ -765,38 +763,38 @@ t = TypeVar('t') s = TypeVar('s') # Some type variables can be inferred using context, but not all of them. 
a = None # type: List[A] +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testSomeTypeVarsInferredFromContext2] from typing import List, TypeVar s = TypeVar('s') t = TypeVar('t') +def f(a: s, b: t) -> List[s]: pass +class A: pass +class B: pass # Like testSomeTypeVarsInferredFromContext, but tvars in different order. a = None # type: List[A] if int(): a = f(A(), B()) if int(): a = f(B(), B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" -def f(a: s, b: t) -> List[s]: pass -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - [lambda x: x], []) def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass class A: pass +map( + [lambda x: x], []) [builtins fixtures/list.pyi] [out] @@ -814,7 +812,7 @@ if int(): from typing import List class A: def __init__(self): - self.x = [] # type: List[int] + self.x = [] # type: List[int] # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs a = A() a.x = [] a.x = [1] @@ -1419,3 +1417,60 @@ def bar(x: Union[Mapping[Any, Any], Dict[Any, Sequence[Any]]]) -> None: ... bar({1: 2}) [builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} + +def foo(arg: Optional[str] = None) -> None: + if arg is None: + arg = d.get("a", "b") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall2] +# flags: --strict-optional +from typing import Dict, Optional + +d: Dict[str, str] = {} +x: Optional[str] +if x: + reveal_type(x) # N: Revealed type is "builtins.str" + x = d.get(x, x) + reveal_type(x) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testOptionalTypeNarrowedByGenericCall3] +# flags: --strict-optional +from typing import Generic, TypeVar, Union + +T = TypeVar("T") +def bar(arg: Union[str, T]) -> Union[str, T]: ... + +def foo(arg: Union[str, int]) -> None: + if isinstance(arg, int): + arg = bar("default") + reveal_type(arg) # N: Revealed type is "builtins.str" +[builtins fixtures/isinstance.pyi] + +[case testOptionalTypeNarrowedByGenericCall4] +# flags: --strict-optional +from typing import Optional, List, Generic, TypeVar + +T = TypeVar("T", covariant=True) +class C(Generic[T]): ... 
+ +x: Optional[C[int]] = None +y = x = C() +reveal_type(y) # N: Revealed type is "__main__.C[builtins.int]" + +[case testOptionalTypeNarrowedByGenericCall5] +from typing import Any, Tuple, Union + +i: Union[Tuple[Any, ...], int] +b: Any +i = i if isinstance(i, int) else b +reveal_type(i) # N: Revealed type is "Union[Any, builtins.int]" +[builtins fixtures/isinstance.pyi] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index 04c710af10d1..fc8113766f1a 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -3,7 +3,9 @@ [case testInferSimpleGvarType] -import typing +class A: pass +class B: pass + x = A() y = B() if int(): @@ -14,9 +16,6 @@ if int(): x = y # E: Incompatible types in assignment (expression has type "B", variable has type "A") if int(): x = x -class A: pass -class B: pass - [case testInferSimpleLvarType] import typing def f() -> None: @@ -401,6 +400,8 @@ a = None # type: A b = None # type: B c = None # type: Tuple[A, object] +def id(a: T) -> T: pass + if int(): b = id(a) # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = id(b) # E: Incompatible types in assignment (expression has type "B", variable has type "A") @@ -412,8 +413,6 @@ if int(): b = id(b) c = id(c) -def id(a: T) -> T: pass - class A: pass class B: pass [builtins fixtures/tuple.pyi] @@ -444,20 +443,26 @@ def ff() -> None: x = f() # E: Need type annotation for "x" reveal_type(x) # N: Revealed type is "Any" +def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar +def g(a: T) -> None: pass + g(None) # Ok f() # Ok because not used to infer local variable type g(a) - -def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same Typevar -def g(a: T) -> None: pass [out] [case testInferenceWithMultipleConstraints] from typing import TypeVar + +class A: pass +class B(A): pass + T = TypeVar('T') a = None # type: A b = None # type: B +def f(a: T, b: T) -> T: pass + if int(): b = f(a, b) # E: Incompatible types in assignment (expression has type "A", variable has type "B") if int(): @@ -467,15 +472,16 @@ if int(): if int(): a = f(b, a) -def f(a: T, b: T) -> T: pass - -class A: pass -class B(A): pass - [case testInferenceWithMultipleVariables] from typing import Tuple, TypeVar T = TypeVar('T') S = TypeVar('S') + +def f(a: T, b: S) -> Tuple[T, S]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) taa = None # type: Tuple[A, A] tab = None # type: Tuple[A, B] @@ -493,11 +499,6 @@ if int(): tab = f(a, b) if int(): tba = f(b, a) - -def f(a: T, b: S) -> Tuple[T, S]: pass - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] [case testConstraintSolvingWithSimpleGenerics] @@ -507,6 +508,14 @@ ao = None # type: A[object] ab = None # type: A[B] ac = None # type: A[C] +def f(a: 'A[T]') -> 'A[T]': pass + +def g(a: T) -> T: pass + +class A(Generic[T]): pass +class B: pass +class C: pass + if int(): ab = f(ao) # E: Argument 1 to "f" has incompatible type "A[object]"; expected "A[B]" ao = f(ab) # E: Argument 1 to "f" has incompatible type "A[B]"; expected "A[object]" @@ -524,37 +533,33 @@ if int(): if int(): ab = g(ab) ao = g(ao) - -def f(a: 'A[T]') -> 'A[T]': pass - -def g(a: T) -> T: pass - -class A(Generic[T]): pass -class B: pass -class C: pass - [case testConstraintSolvingFailureWithSimpleGenerics] from typing import TypeVar, Generic T = TypeVar('T') ao = None # type: A[object] ab = 
None # type: A[B] -f(ao, ab) # E: Cannot infer type argument 1 of "f" -f(ab, ao) # E: Cannot infer type argument 1 of "f" -f(ao, ao) -f(ab, ab) - def f(a: 'A[T]', b: 'A[T]') -> None: pass class A(Generic[T]): pass class B: pass + +f(ao, ab) # E: Cannot infer type argument 1 of "f" +f(ab, ao) # E: Cannot infer type argument 1 of "f" +f(ao, ao) +f(ab, ab) [case testTypeInferenceWithCalleeDefaultArgs] from typing import TypeVar T = TypeVar('T') a = None # type: A o = None # type: object +def f(a: T = None) -> T: pass +def g(a: T, b: T = None) -> T: pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -569,11 +574,6 @@ if int(): if int(): a = g(a) -def f(a: T = None) -> T: pass -def g(a: T, b: T = None) -> T: pass - -class A: pass - -- Generic function inference with multiple inheritance -- ---------------------------------------------------- @@ -655,6 +655,12 @@ g(c) [case testPrecedenceOfFirstBaseAsInferenceResult] from typing import TypeVar from abc import abstractmethod, ABCMeta +class A: pass +class B(A, I, J): pass +class C(A, I, J): pass + +def f(a: T, b: T) -> T: pass + T = TypeVar('T') a, i, j = None, None, None # type: (A, I, J) @@ -663,11 +669,7 @@ a = f(B(), C()) class I(metaclass=ABCMeta): pass class J(metaclass=ABCMeta): pass -def f(a: T, b: T) -> T: pass -class A: pass -class B(A, I, J): pass -class C(A, I, J): pass [builtins fixtures/tuple.pyi] @@ -966,6 +968,9 @@ list_2 = [f, h] [case testInferenceOfFor1] a, b = None, None # type: (A, B) +class A: pass +class B: pass + for x in [A()]: b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") a = x @@ -973,40 +978,32 @@ for x in [A()]: for y in []: # E: Need type annotation for "y" a = y reveal_type(y) # N: Revealed type is "Any" - -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testInferenceOfFor2] +class A: pass +class B: pass +class C: pass a, b, c = None, None, None # type: (A, B, C) for x, (y, z) in [(A(), (B(), C()))]: - b = x # Fail - c = y # Fail - a = z # Fail + b = x # E: Incompatible types in assignment (expression has type "A", variable has type "B") + c = y # E: Incompatible types in assignment (expression has type "B", variable has type "C") + a = z # E: Incompatible types in assignment (expression has type "C", variable has type "A") a = x b = y c = z -for xx, yy, zz in [(A(), B())]: # Fail +for xx, yy, zz in [(A(), B())]: # E: Need more than 2 values to unpack (3 expected) pass -for xx, (yy, zz) in [(A(), B())]: # Fail +for xx, (yy, zz) in [(A(), B())]: # E: "B" object is not iterable pass for xxx, yyy in [(None, None)]: pass - -class A: pass -class B: pass -class C: pass [builtins fixtures/for.pyi] -[out] -main:4: error: Incompatible types in assignment (expression has type "A", variable has type "B") -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "C") -main:6: error: Incompatible types in assignment (expression has type "C", variable has type "A") -main:10: error: Need more than 2 values to unpack (3 expected) -main:12: error: "B" object is not iterable [case testInferenceOfFor3] +class A: pass +class B: pass a, b = None, None # type: (A, B) @@ -1021,19 +1018,21 @@ for e, f in [[]]: # E: Need type annotation for "e" \ reveal_type(e) # N: Revealed type is "Any" reveal_type(f) # N: Revealed type is "Any" -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testForStatementInferenceWithVoid] -import typing +def 
f() -> None: pass + for x in f(): # E: "f" does not return a value pass -def f() -> None: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex] import typing + +class A: pass +class B: pass + for a in [A()]: pass a = A() if int(): @@ -1041,8 +1040,6 @@ if int(): for a in []: pass a = A() a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testReusingInferredForIndex2] @@ -1128,15 +1125,15 @@ if int(): class A: pass [case testInferGlobalDefinedInBlock] -import typing +class A: pass +class B: pass + if A: a = A() if int(): a = A() if int(): a = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass [case testAssigningAnyStrToNone] from typing import Tuple, TypeVar @@ -1276,6 +1273,21 @@ class A: def h(x: Callable[[], int]) -> None: pass +[case testLambdaJoinWithDynamicConstructor] +from typing import Any, Union + +class Wrapper: + def __init__(self, x: Any) -> None: ... + +def f(cond: bool) -> Any: + f = Wrapper if cond else lambda x: x + reveal_type(f) # N: Revealed type is "def (x: Any) -> Any" + return f(3) + +def g(cond: bool) -> Any: + f = lambda x: x if cond else Wrapper + reveal_type(f) # N: Revealed type is "def (x: Any) -> Any" + return f(3) -- Boolean operators -- ----------------- @@ -1299,7 +1311,7 @@ class A: pass [case testAccessGlobalVarBeforeItsTypeIsAvailable] import typing -x.y # E: Cannot determine type of "x" +x.y # E: Cannot determine type of "x" # E: Name "x" is used before definition x = object() x.y # E: "object" has no attribute "y" @@ -1475,9 +1487,8 @@ class A: self.x = [] # E: Need type annotation for "x" (hint: "x: List[<type>] = ...") class B(A): - # TODO?: This error is kind of a false positive, unfortunately @property - def x(self) -> List[int]: # E: Signature of "x" incompatible with supertype "A" + def x(self) -> List[int]: # E: Cannot override writeable attribute with read-only property return [123] [builtins fixtures/list.pyi] @@ -1940,6 +1951,13 @@ class A: [out] main:4: error: "None" has no attribute "__iter__" (not iterable) +[case testPartialTypeErrorSpecialCase4] +# This used to crash. 
+arr = [] +arr.append(arr.append(1)) +[builtins fixtures/list.pyi] +[out] +main:3: error: "append" of "list" does not return a value -- Multipass -- --------- @@ -1963,9 +1981,9 @@ class A: [out] [case testMultipassAndTopLevelVariable] -y = x # E: Cannot determine type of "x" +y = x # E: Cannot determine type of "x" # E: Name "x" is used before definition y() -x = 1+0 +x = 1+int() [out] [case testMultipassAndDecoratedMethod] @@ -2146,7 +2164,7 @@ from typing import TypeVar, Callable T = TypeVar('T') def dec() -> Callable[[T], T]: pass -A.g # E: Cannot determine type of "g" +A.g # E: Cannot determine type of "g" # E: Name "A" is used before definition class A: @classmethod @@ -2341,7 +2359,7 @@ def main() -> None: [case testDontMarkUnreachableAfterInferenceUninhabited] from typing import TypeVar T = TypeVar('T') -def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def f() -> T: pass # E: A function returning TypeVar should receive at least one argument containing the same TypeVar class C: x = f() # E: Need type annotation for "x" @@ -2394,7 +2412,7 @@ if bool(): [case testLocalPartialTypesWithGlobalInitializedToNone] # flags: --local-partial-types -x = None # E: Need type annotation for "x" +x = None # E: Need type annotation for "x" (hint: "x: Optional[<type>] = ...") def f() -> None: global x @@ -2405,7 +2423,7 @@ reveal_type(x) # N: Revealed type is "None" [case testLocalPartialTypesWithGlobalInitializedToNone2] # flags: --local-partial-types -x = None # E: Need type annotation for "x" +x = None # E: Need type annotation for "x" (hint: "x: Optional[<type>] = ...") def f(): global x @@ -2454,7 +2472,7 @@ reveal_type(a) # N: Revealed type is "builtins.str" [case testLocalPartialTypesWithClassAttributeInitializedToNone] # flags: --local-partial-types class A: - x = None # E: Need type annotation for "x" + x = None # E: Need type annotation for "x" (hint: "x: Optional[<type>] = ...") def f(self) -> None: self.x = 1 @@ -2637,7 +2655,7 @@ from typing import List def f(x): pass class A: - x = None # E: Need type annotation for "x" + x = None # E: Need type annotation for "x" (hint: "x: Optional[<type>] = ...") def f(self, p: List[str]) -> None: self.x = f(p) @@ -2647,7 +2665,7 @@ class A: [case testLocalPartialTypesAccessPartialNoneAttribute] # flags: --local-partial-types class C: - a = None # E: Need type annotation for "a" + a = None # E: Need type annotation for "a" (hint: "a: Optional[<type>] = ...") def f(self, x) -> None: C.a.y # E: Item "None" of "Optional[Any]" has no attribute "y" @@ -2655,7 +2673,7 @@ class C: [case testLocalPartialTypesAccessPartialNoneAttribute2] # flags: --local-partial-types class C: - a = None # E: Need type annotation for "a" + a = None # E: Need type annotation for "a" (hint: "a: Optional[<type>] = ...") def f(self, x) -> None: self.a.y # E: Item "None" of "Optional[Any]" has no attribute "y" @@ -2976,13 +2994,14 @@ class C: [case testUnionGenericWithBoundedVariable] from typing import Generic, TypeVar, Union +class A: ... +class B(A): ... + T = TypeVar('T', bound=A) class Z(Generic[T]): def __init__(self, y: T) -> None: self.y = y -class A: ... -class B(A): ...
F = TypeVar('F', bound=A) def q1(x: Union[F, Z[F]]) -> F: @@ -3249,6 +3268,10 @@ if x: reveal_type(x) # N: Revealed type is "builtins.bytes" [builtins fixtures/dict.pyi] +[case testSuggestPep604AnnotationForPartialNone] +# flags: --local-partial-types --python-version 3.10 +x = None # E: Need type annotation for "x" (hint: "x: <type> | None = ...") + [case testTupleContextFromIterable] from typing import TypeVar, Iterable, List, Union @@ -3264,3 +3287,115 @@ from typing import Dict, Iterable, Tuple, Union def foo(x: Union[Tuple[str, Dict[str, int], str], Iterable[object]]) -> None: ... foo(("a", {"a": "b"}, "b")) [builtins fixtures/dict.pyi] + +[case testUseSupertypeAsInferenceContext] +# flags: --strict-optional +from typing import List, Optional + +class B: + x: List[Optional[int]] + +class C(B): + x = [1] + +reveal_type(C().x) # N: Revealed type is "builtins.list[Union[builtins.int, None]]" +[builtins fixtures/list.pyi] + +[case testUseSupertypeAsInferenceContextInvalidType] +from typing import List +class P: + x: List[int] +class C(P): + x = ['a'] # E: List item 0 has incompatible type "str"; expected "int" +[builtins fixtures/list.pyi] + +[case testUseSupertypeAsInferenceContextPartial] +from typing import List + +class A: + x: List[str] + +class B(A): + x = [] + +reveal_type(B().x) # N: Revealed type is "builtins.list[builtins.str]" +[builtins fixtures/list.pyi] + +[case testUseSupertypeAsInferenceContextPartialError] +class A: + x = ['a', 'b'] + +class B(A): + x = [] + x.append(2) # E: Argument 1 to "append" of "list" has incompatible type "int"; expected "str" +[builtins fixtures/list.pyi] + +[case testUseSupertypeAsInferenceContextPartialErrorProperty] +from typing import List + +class P: + @property + def x(self) -> List[int]: ... +class C(P): + x = [] + +C.x.append("no") # E: Argument 1 to "append" of "list" has incompatible type "str"; expected "int" +[builtins fixtures/list.pyi] + +[case testUseSupertypeAsInferenceContextConflict] +from typing import List +class P: + x: List[int] +class M: + x: List[str] +class C(P, M): + x = [] # E: Need type annotation for "x" (hint: "x: List[<type>] = ...") +reveal_type(C.x) # N: Revealed type is "builtins.list[Any]" +[builtins fixtures/list.pyi] + +[case testNoPartialInSupertypeAsContext] +class A: + args = {} # E: Need type annotation for "args" (hint: "args: Dict[<type>, <type>] = ...") + def f(self) -> None: + value = {1: "Hello"} + class B(A): + args = value +[builtins fixtures/dict.pyi] + +[case testInferSimpleLiteralInClassBodyCycle] +import a +[file a.py] +import b +reveal_type(b.B.x) +class A: + x = 42 +[file b.py] +import a
reveal_type(a.A.x) +class B: + x = 42 +[out] +tmp/b.py:2: note: Revealed type is "builtins.int" +tmp/a.py:2: note: Revealed type is "builtins.int" + +[case testUnionTypeCallableInference] +from typing import Callable, Type, TypeVar, Union + +class A: + def __init__(self, x: str) -> None: ... + +T = TypeVar("T") +def type_or_callable(value: T, tp: Union[Type[T], Callable[[int], T]]) -> T: ... +reveal_type(type_or_callable(A("test"), A)) # N: Revealed type is "__main__.A" + +[case testJoinWithAnyFallback] +from unknown import X # type: ignore[import] + +class A: ... +class B(X, A): ... +class C(B): ... +class D(C): ... +class E(D): ...
+ +reveal_type([E(), D()]) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type([D(), E()]) # N: Revealed type is "builtins.list[__main__.D]" diff --git a/test-data/unit/check-inline-config.test b/test-data/unit/check-inline-config.test index 578d8eff7ff8..1b2085e33e91 100644 --- a/test-data/unit/check-inline-config.test +++ b/test-data/unit/check-inline-config.test @@ -162,3 +162,51 @@ main:1: error: Unrecognized option: skip_file = True # mypy: strict [out] main:1: error: Setting "strict" not supported in inline configuration: specify it in a configuration file instead, or set individual inline flags (see "mypy -h" for the list of flags enabled in strict mode) + +[case testInlineErrorCodes] +# flags: --strict-optional +# mypy: enable-error-code="ignore-without-code,truthy-bool" +class Foo: + pass + +foo = Foo() +if foo: ... # E: "__main__.foo" has type "Foo" which does not implement __bool__ or __len__ so it could always be true in boolean context +42 + "no" # type: ignore # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead) + +[case testInlineErrorCodesOverrideConfig] +# flags: --strict-optional --config-file tmp/mypy.ini +import foo +import tests.bar +import tests.baz +[file foo.py] +# mypy: disable-error-code="truthy-bool" +class Foo: + pass + +foo = Foo() +if foo: ... +42 + "no" # type: ignore # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead) + +[file tests/__init__.py] +[file tests/bar.py] +# mypy: enable-error-code="ignore-without-code" + +def foo() -> int: ... +if foo: ... # E: Function "Callable[[], int]" could always be true in boolean context +42 + "no" # type: ignore # E: "type: ignore" comment without error code (consider "type: ignore[operator]" instead) + +[file tests/baz.py] +# mypy: disable-error-code="truthy-bool" +class Foo: + pass + +foo = Foo() +if foo: ... 
+42 + "no" # type: ignore + +[file mypy.ini] +\[mypy] +enable_error_code = ignore-without-code, truthy-bool + +\[mypy-tests.*] +disable_error_code = ignore-without-code diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 555e1a568d25..0722ee8d91e5 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -1321,8 +1321,7 @@ def f(x: Union[A, B]) -> None: f(x) [builtins fixtures/isinstance.pyi] -[case testIsinstanceWithOverlappingPromotionTypes-skip] -# Currently disabled: see https://github.com/python/mypy/issues/6060 for context +[case testIsinstanceWithOverlappingPromotionTypes] from typing import Union class FloatLike: pass @@ -1751,11 +1750,8 @@ def f(cls: Type[object]) -> None: [case testIsinstanceTypeArgs] from typing import Iterable, TypeVar x = 1 -T = TypeVar('T') - isinstance(x, Iterable) isinstance(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -isinstance(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks isinstance(x, (int, (str, Iterable[int]))) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstancelist.pyi] @@ -1784,14 +1780,43 @@ isinstance(x, It2) # E: Parameterized generics cannot be used with class or ins [case testIssubclassTypeArgs] from typing import Iterable, TypeVar x = int -T = TypeVar('T') issubclass(x, Iterable) issubclass(x, Iterable[int]) # E: Parameterized generics cannot be used with class or instance checks -issubclass(x, Iterable[T]) # E: Parameterized generics cannot be used with class or instance checks issubclass(x, (int, Iterable[int])) # E: Parameterized generics cannot be used with class or instance checks [builtins fixtures/isinstance.pyi] [typing fixtures/typing-full.pyi] +[case testIssubclassWithMetaclasses] +class FooMetaclass(type): ... +class Foo(metaclass=FooMetaclass): ... +class Bar: ... + +fm: FooMetaclass +reveal_type(fm) # N: Revealed type is "__main__.FooMetaclass" +if issubclass(fm, Foo): + reveal_type(fm) # N: Revealed type is "Type[__main__.Foo]" +if issubclass(fm, Bar): + reveal_type(fm) # N: Revealed type is "None" +[builtins fixtures/isinstance.pyi] + +[case testIssubclassWithMetaclassesStrictOptional] +# flags: --strict-optional +class FooMetaclass(type): ... +class BarMetaclass(type): ... +class Foo(metaclass=FooMetaclass): ... +class Bar(metaclass=BarMetaclass): ... +class Baz: ... 
+ +fm: FooMetaclass +reveal_type(fm) # N: Revealed type is "__main__.FooMetaclass" +if issubclass(fm, Foo): + reveal_type(fm) # N: Revealed type is "Type[__main__.Foo]" +if issubclass(fm, Bar): + reveal_type(fm) # N: Revealed type is "" +if issubclass(fm, Baz): + reveal_type(fm) # N: Revealed type is "" +[builtins fixtures/isinstance.pyi] + [case testIsinstanceAndNarrowTypeVariable] from typing import TypeVar @@ -2362,7 +2387,7 @@ class B: x1: Literal[1] = self.f() def t2(self) -> None: - if isinstance(self, (A0, A1)): # E: Subclass of "B" and "A0" cannot exist: would have incompatible method signatures + if isinstance(self, (A0, A1)): reveal_type(self) # N: Revealed type is "__main__.1" x0: Literal[0] = self.f() # E: Incompatible types in assignment (expression has type "Literal[1]", variable has type "Literal[0]") x1: Literal[1] = self.f() @@ -2698,3 +2723,189 @@ if type(x) is not C: reveal_type(x) # N: Revealed type is "__main__.D" else: reveal_type(x) # N: Revealed type is "__main__.C" + +[case testHasAttrExistingAttribute] +class C: + x: int +c: C +if hasattr(c, "x"): + reveal_type(c.x) # N: Revealed type is "builtins.int" +else: + # We don't mark this unreachable since people may check for deleted attributes + reveal_type(c.x) # N: Revealed type is "builtins.int" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeInstance] +class B: ... +b: B +if hasattr(b, "x"): + reveal_type(b.x) # N: Revealed type is "Any" +else: + b.x # E: "B" has no attribute "x" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeFunction] +def foo(x: int) -> None: ... +if hasattr(foo, "x"): + reveal_type(foo.x) # N: Revealed type is "Any" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeClassObject] +class C: ... +if hasattr(C, "x"): + reveal_type(C.x) # N: Revealed type is "Any" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeTypeType] +from typing import Type +class C: ... +c: Type[C] +if hasattr(c, "x"): + reveal_type(c.x) # N: Revealed type is "Any" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeTypeVar] +from typing import TypeVar + +T = TypeVar("T") +def foo(x: T) -> T: + if hasattr(x, "x"): + reveal_type(x.x) # N: Revealed type is "Any" + return x + else: + return x +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeChained] +class B: ... +b: B +if hasattr(b, "x"): + reveal_type(b.x) # N: Revealed type is "Any" +elif hasattr(b, "y"): + reveal_type(b.y) # N: Revealed type is "Any" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeNested] +class A: ... +class B: ... + +x: A +if hasattr(x, "x"): + if isinstance(x, B): + reveal_type(x.x) # N: Revealed type is "Any" + +if hasattr(x, "x") and hasattr(x, "y"): + reveal_type(x.x) # N: Revealed type is "Any" + reveal_type(x.y) # N: Revealed type is "Any" + +if hasattr(x, "x"): + if hasattr(x, "y"): + reveal_type(x.x) # N: Revealed type is "Any" + reveal_type(x.y) # N: Revealed type is "Any" + +if hasattr(x, "x") or hasattr(x, "y"): + x.x # E: "A" has no attribute "x" + x.y # E: "A" has no attribute "y" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrPreciseType] +class A: ... + +x: A +if hasattr(x, "a") and isinstance(x.a, int): + reveal_type(x.a) # N: Revealed type is "builtins.int" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeUnion] +from typing import Union + +class A: ... 
+class B: + x: int + +xu: Union[A, B] +if hasattr(xu, "x"): + reveal_type(xu) # N: Revealed type is "Union[__main__.A, __main__.B]" + reveal_type(xu.x) # N: Revealed type is "Union[Any, builtins.int]" +else: + reveal_type(xu) # N: Revealed type is "__main__.A" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeOuterUnion] +from typing import Union + +class A: ... +class B: ... +xu: Union[A, B] +if isinstance(xu, B): + if hasattr(xu, "x"): + reveal_type(xu.x) # N: Revealed type is "Any" + +if isinstance(xu, B) and hasattr(xu, "x"): + reveal_type(xu.x) # N: Revealed type is "Any" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrDoesntInterfereGetAttr] +class C: + def __getattr__(self, attr: str) -> str: ... + +c: C +if hasattr(c, "foo"): + reveal_type(c.foo) # N: Revealed type is "builtins.str" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrMissingAttributeLiteral] +from typing import Final +class B: ... +b: B +ATTR: Final = "x" +if hasattr(b, ATTR): + reveal_type(b.x) # N: Revealed type is "Any" +else: + b.x # E: "B" has no attribute "x" +[builtins fixtures/isinstance.pyi] + +[case testHasAttrDeferred] +def foo() -> str: ... + +class Test: + def stream(self) -> None: + if hasattr(self, "_body"): + reveal_type(self._body) # N: Revealed type is "builtins.str" + + def body(self) -> str: + if not hasattr(self, "_body"): + self._body = foo() + return self._body +[builtins fixtures/isinstance.pyi] + +[case testHasAttrModule] +import mod + +if hasattr(mod, "y"): + reveal_type(mod.y) # N: Revealed type is "Any" + reveal_type(mod.x) # N: Revealed type is "builtins.int" +else: + mod.y # E: Module has no attribute "y" + reveal_type(mod.x) # N: Revealed type is "builtins.int" + +if hasattr(mod, "x"): + mod.y # E: Module has no attribute "y" + reveal_type(mod.x) # N: Revealed type is "builtins.int" +else: + mod.y # E: Module has no attribute "y" + reveal_type(mod.x) # N: Revealed type is "builtins.int" + +[file mod.py] +x: int +[builtins fixtures/module.pyi] + +[case testHasAttrDoesntInterfereModuleGetAttr] +import mod + +if hasattr(mod, "y"): + reveal_type(mod.y) # N: Revealed type is "builtins.str" + +[file mod.py] +def __getattr__(attr: str) -> str: ... 
+[builtins fixtures/module.pyi] diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index 9f8de1265ee7..e0fe389bbbd9 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -8,23 +8,27 @@ f(o=None()) # E: "None" not callable [case testSimpleKeywordArgument] import typing +class A: pass def f(a: 'A') -> None: pass f(a=A()) f(a=object()) # E: Argument "a" to "f" has incompatible type "object"; expected "A" -class A: pass [case testTwoKeywordArgumentsNotInOrder] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(b=A(), a=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(b=B(), a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "A" f(a=A(), b=B()) f(b=B(), a=A()) -class A: pass -class B: pass [case testOneOfSeveralOptionalKeywordArguments] +# flags: --implicit-optional import typing +class A: pass +class B: pass +class C: pass def f(a: 'A' = None, b: 'B' = None, c: 'C' = None) -> None: pass f(a=A()) f(b=B()) @@ -34,39 +38,34 @@ f(a=B()) # E: Argument "a" to "f" has incompatible type "B"; expected "Optional[ f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(c=B()) # E: Argument "c" to "f" has incompatible type "B"; expected "Optional[C]" f(b=B(), c=A()) # E: Argument "c" to "f" has incompatible type "A"; expected "Optional[C]" -class A: pass -class B: pass -class C: pass - [case testBothPositionalAndKeywordArguments] import typing +class A: pass +class B: pass def f(a: 'A', b: 'B') -> None: pass f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "B" f(A(), b=B()) -class A: pass -class B: pass [case testContextSensitiveTypeInferenceForKeywordArg] from typing import List +class A: pass def f(a: 'A', b: 'List[A]') -> None: pass f(b=[], a=A()) -class A: pass [builtins fixtures/list.pyi] [case testGivingArgumentAsPositionalAndKeywordArg] import typing -def f(a: 'A', b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +def f(a: 'A', b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testGivingArgumentAsPositionalAndKeywordArg2] import typing -def f(a: 'A' = None, b: 'B' = None) -> None: pass -f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass - +def f(a: 'A' = None, b: 'B' = None) -> None: pass +f(A(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPositionalAndKeywordForSameArg] # This used to crash in check_argument_count(). See #1095. def f(a: int): pass @@ -80,57 +79,57 @@ f(b=object()) # E: Unexpected keyword argument "b" for "f" class A: pass [case testKeywordMisspelling] +class A: pass def f(other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [case testMultipleKeywordsForMisspelling] -def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other" or "atter"? class A: pass class B: pass +def f(thing : 'A', other: 'A', atter: 'A', btter: 'B') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "other"? 
[case testKeywordMisspellingDifferentType] -def f(other: 'A') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'A') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? [case testKeywordMisspellingInheritance] -def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here -f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? class A: pass class B(A): pass class C: pass +def f(atter: 'A', btter: 'B', ctter: 'C') -> None: pass # N: "f" defined here +f(otter=B()) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingFloatInt] def f(atter: float, btter: int) -> None: pass # N: "f" defined here x: int = 5 -f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "btter" or "atter"? +f(otter=x) # E: Unexpected keyword argument "otter" for "f"; did you mean "atter" or "btter"? [case testKeywordMisspellingVarArgs] +class A: pass def f(other: 'A', *atter: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingOnlyVarArgs] +class A: pass def f(*other: 'A') -> None: pass # N: "f" defined here f(otter=A()) # E: Unexpected keyword argument "otter" for "f" -class A: pass [builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarArgsDifferentTypes] -def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here -f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? class A: pass class B: pass +def f(other: 'B', *atter: 'A') -> None: pass # N: "f" defined here +f(otter=A()) # E: Unexpected keyword argument "otter" for "f"; did you mean "other"? 
[builtins fixtures/tuple.pyi] [case testKeywordMisspellingVarKwargs] +class A: pass def f(other: 'A', **atter: 'A') -> None: pass f(otter=A()) # E: Missing positional argument "other" in call to "f" -class A: pass [builtins fixtures/dict.pyi] [case testKeywordArgumentsWithDynamicallyTypedCallable] @@ -142,18 +141,15 @@ f(x=None) [case testKeywordArgumentWithFunctionObject] from typing import Callable -f = None # type: Callable[[A, B], None] -f(a=A(), b=B()) -f(A(), b=B()) class A: pass class B: pass -[out] -main:3: error: Unexpected keyword argument "a" -main:3: error: Unexpected keyword argument "b" -main:4: error: Unexpected keyword argument "b" - +f = None # type: Callable[[A, B], None] +f(a=A(), b=B()) # E: Unexpected keyword argument "a" # E: Unexpected keyword argument "b" +f(A(), b=B()) # E: Unexpected keyword argument "b" [case testKeywordOnlyArguments] import typing +class A: pass +class B: pass def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -176,13 +172,12 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) +[case testKeywordOnlyArgumentsFastparse] +import typing class A: pass class B: pass -[case testKeywordOnlyArgumentsFastparse] - -import typing def f(a: 'A', *, b: 'B' = None) -> None: pass def g(a: 'A', *, b: 'B') -> None: pass def h(a: 'A', *, b: 'B', aa: 'A') -> None: pass @@ -205,10 +200,6 @@ i(A(), b=B()) i(A(), aa=A()) # E: Missing named argument "b" for "i" i(A(), b=B(), aa=A()) i(A(), aa=A(), b=B()) - -class A: pass -class B: pass - [case testKwargsAfterBareArgs] from typing import Tuple, Any def f(a, *, b=None) -> None: pass @@ -219,7 +210,10 @@ f(a, **b) [builtins fixtures/dict.pyi] [case testKeywordArgAfterVarArgs] +# flags: --implicit-optional import typing +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass f() f(A()) @@ -230,12 +224,13 @@ f(A(), A(), b=B()) f(B()) # E: Argument 1 to "f" has incompatible type "B"; expected "A" f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testKeywordArgAfterVarArgsWithBothCallerAndCalleeVarArgs] +# flags: --implicit-optional --no-strict-optional from typing import List +class A: pass +class B: pass def f(*a: 'A', b: 'B' = None) -> None: pass a = None # type: List[A] f(*a) @@ -246,18 +241,16 @@ f(A(), *a, b=B()) f(A(), B()) # E: Argument 2 to "f" has incompatible type "B"; expected "A" f(A(), b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" f(*a, b=A()) # E: Argument "b" to "f" has incompatible type "A"; expected "Optional[B]" -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallingDynamicallyTypedFunctionWithKeywordArgs] import typing +class A: pass def f(x, y=A()): pass # N: "f" defined here f(x=A(), y=A()) f(y=A(), x=A()) f(y=A()) # E: Missing positional argument "x" in call to "f" f(A(), z=A()) # E: Unexpected keyword argument "z" for "f" -class A: pass [case testKwargsArgumentInFunctionBody] from typing import Dict, Any @@ -281,6 +274,8 @@ class A: pass [case testCallingFunctionThatAcceptsVarKwargs] import typing +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass f() f(x=A()) @@ -288,12 +283,12 @@ f(y=A(), z=A()) f(x=B()) # E: Argument "x" to "f" has incompatible type "B"; expected "A" f(A()) # E: Too many 
arguments for "f" # Perhaps a better message would be "Too many *positional* arguments..." -class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testCallingFunctionWithKeywordVarArgs] from typing import Dict +class A: pass +class B: pass def f( **kwargs: 'A') -> None: pass d = None # type: Dict[str, A] f(**d) @@ -302,8 +297,6 @@ d2 = None # type: Dict[str, B] f(**d2) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(x=A(), **d2) # E: Argument 2 to "f" has incompatible type "**Dict[str, B]"; expected "A" f(**{'x': B()}) # E: Argument 1 to "f" has incompatible type "**Dict[str, B]"; expected "A" -class A: pass -class B: pass [builtins fixtures/dict.pyi] [case testKwargsAllowedInDunderCall] @@ -352,11 +345,11 @@ class A: pass [case testInvalidTypeForKeywordVarArg] # flags: --strict-optional from typing import Dict, Any, Optional +class A: pass def f(**kwargs: 'A') -> None: pass d = {} # type: Dict[A, A] f(**d) # E: Keywords must be strings f(**A()) # E: Argument after ** must be a mapping, not "A" -class A: pass kwargs: Optional[Any] f(**kwargs) # E: Argument after ** must be a mapping, not "Optional[Any]" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-literal.test b/test-data/unit/check-literal.test index b6eae1da7d84..d523e5c08af8 100644 --- a/test-data/unit/check-literal.test +++ b/test-data/unit/check-literal.test @@ -1484,16 +1484,17 @@ Alias = Literal[3] isinstance(3, Literal[3]) # E: Cannot use isinstance() with Literal type isinstance(3, Alias) # E: Cannot use isinstance() with Literal type \ - # E: Argument 2 to "isinstance" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "isinstance" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" isinstance(3, Renamed[3]) # E: Cannot use isinstance() with Literal type isinstance(3, indirect.Literal[3]) # E: Cannot use isinstance() with Literal type issubclass(int, Literal[3]) # E: Cannot use issubclass() with Literal type issubclass(int, Alias) # E: Cannot use issubclass() with Literal type \ - # E: Argument 2 to "issubclass" has incompatible type "object"; expected "Union[type, Tuple[Any, ...]]" + # E: Argument 2 to "issubclass" has incompatible type ""; expected "Union[type, Tuple[Any, ...]]" issubclass(int, Renamed[3]) # E: Cannot use issubclass() with Literal type issubclass(int, indirect.Literal[3]) # E: Cannot use issubclass() with Literal type [builtins fixtures/isinstancelist.pyi] +[typing fixtures/typing-medium.pyi] [out] [case testLiteralErrorsWhenSubclassed] @@ -2436,23 +2437,10 @@ b: Final = 3 c: Final[Literal[3]] = 3 d: Literal[3] -# TODO: Consider if we want to support cases 'b' and 'd' or not. -# Probably not: we want to mostly keep the 'types' and 'value' worlds distinct. -# However, according to final semantics, we ought to be able to substitute "b" with -# "3" wherever it's used and get the same behavior -- so maybe we do need to support -# at least case "b" for consistency? -a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.a" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.b" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] 
is invalid \ - # E: Variable "__main__.c" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases -d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid \ - # E: Variable "__main__.d" is not valid as a type \ - # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a_wrap: Literal[4, a] # E: Parameter 2 of Literal[...] is invalid +b_wrap: Literal[4, b] # E: Parameter 2 of Literal[...] is invalid +c_wrap: Literal[4, c] # E: Parameter 2 of Literal[...] is invalid +d_wrap: Literal[4, d] # E: Parameter 2 of Literal[...] is invalid [builtins fixtures/tuple.pyi] [out] @@ -2516,9 +2504,7 @@ r: Literal[Color.RED] g: Literal[Color.GREEN] b: Literal[Color.BLUE] bad1: Literal[Color] # E: Parameter 1 of Literal[...] is invalid -bad2: Literal[Color.func] # E: Function "__main__.Color.func" is not valid as a type \ - # N: Perhaps you need "Callable[...]" or a callback protocol? \ - # E: Parameter 1 of Literal[...] is invalid +bad2: Literal[Color.func] # E: Parameter 1 of Literal[...] is invalid bad3: Literal[Color.func()] # E: Invalid type: Literal[...] cannot contain arbitrary expressions def expects_color(x: Color) -> None: pass @@ -2918,3 +2904,44 @@ def incorrect_return2() -> Union[Tuple[Literal[True], int], Tuple[Literal[False] else: return (bool(), 'oops') # E: Incompatible return value type (got "Tuple[bool, str]", expected "Union[Tuple[Literal[True], int], Tuple[Literal[False], str]]") [builtins fixtures/bool.pyi] + +[case testLiteralSubtypeContext] +from typing_extensions import Literal + +class A: + foo: Literal['bar', 'spam'] +class B(A): + foo = 'spam' + +reveal_type(B().foo) # N: Revealed type is "Literal['spam']" +[builtins fixtures/tuple.pyi] + +[case testLiteralSubtypeContextNested] +from typing import List +from typing_extensions import Literal + +class A: + foo: List[Literal['bar', 'spam']] +class B(A): + foo = ['spam'] + +reveal_type(B().foo) # N: Revealed type is "builtins.list[Union[Literal['bar'], Literal['spam']]]" +[builtins fixtures/tuple.pyi] + +[case testLiteralSubtypeContextGeneric] +from typing_extensions import Literal +from typing import Generic, List, TypeVar + +T = TypeVar("T", bound=str) + +class B(Generic[T]): + collection: List[T] + word: T + +class C(B[Literal["word"]]): + collection = ["word"] + word = "word" + +reveal_type(C().collection) # N: Revealed type is "builtins.list[Literal['word']]" +reveal_type(C().word) # N: Revealed type is "Literal['word']" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index d83d0470c6b0..4b8308310ae6 100644 --- a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -625,7 +625,11 @@ try: from m import f, g except: def f(x): pass - def g(x): pass # E: All conditional function variants must have identical signatures + def g(x): pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def g(x: Any, y: Any) -> Any \ + # N: Redefinition: \ + # N: def g(x: Any) -> Any [file m.py] def f(x): pass def g(x, y): pass @@ -647,10 +651,29 @@ try: from m import f, g # E: Incompatible import of "g" (imported name has type "Callable[[Any, Any], Any]", local name has type "Callable[[Any], Any]") except: pass + +import m as f # E: Incompatible import of "f" (imported name has type "object", local name has type "Callable[[Any], Any]") + [file m.py] def f(x): pass def g(x, y): pass +[case 
testRedefineTypeViaImport] +from typing import Type +import mod + +X: Type[mod.A] +Y: Type[mod.B] +from mod import B as X +from mod import A as Y # E: Incompatible import of "Y" (imported name has type "Type[A]", local name has type "Type[B]") + +import mod as X # E: Incompatible import of "X" (imported name has type "object", local name has type "Type[A]") + +[file mod.py] +class A: ... +class B(A): ... + + [case testImportVariableAndAssignNone] try: from m import x @@ -1325,13 +1348,13 @@ import a import b def f() -> int: return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/b.py:3: note: Revealed type is "builtins.int" @@ -1342,12 +1365,12 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: return a.y -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:3: note: Revealed type is "builtins.int" @@ -1362,7 +1385,7 @@ class C: self.x2 = b.b [file b.py] import a -b = 1 + 1 +b = 1 + int() [out] tmp/a.py:4: error: Cannot determine type of "x2" @@ -1375,7 +1398,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1388,7 +1411,7 @@ def f() -> None: a + '' [file b.py] import a -x = 1 + 1 +x = 1 + int() [out] tmp/a.py:4: error: Unsupported operand types for + ("int" and "str") @@ -1401,7 +1424,7 @@ def g() -> None: @b.deco def f(a: str) -> int: pass reveal_type(f) -x = 1 + 1 +x = 1 + int() [file b.py] from typing import Callable, TypeVar import a @@ -1668,11 +1691,11 @@ mod_any: Any = m mod_int: int = m # E: Incompatible types in assignment (expression has type Module, variable has type "int") reveal_type(mod_mod) # N: Revealed type is "types.ModuleType" -mod_mod.a # E: Module has no attribute "a" +reveal_type(mod_mod.a) # N: Revealed type is "Any" reveal_type(mod_mod2) # N: Revealed type is "types.ModuleType" -mod_mod2.a # E: Module has no attribute "a" +reveal_type(mod_mod2.a) # N: Revealed type is "Any" reveal_type(mod_mod3) # N: Revealed type is "types.ModuleType" -mod_mod3.a # E: Module has no attribute "a" +reveal_type(mod_mod3.a) # N: Revealed type is "Any" reveal_type(mod_any) # N: Revealed type is "Any" [file m.py] @@ -1732,7 +1755,7 @@ if bool(): else: x = n -x.a # E: Module has no attribute "a" +reveal_type(x.nope) # N: Revealed type is "Any" reveal_type(x.__file__) # N: Revealed type is "builtins.str" [file m.py] @@ -1783,14 +1806,14 @@ m = n # E: Cannot assign multiple modules to name "m" without explicit "types.M [builtins fixtures/module.pyi] [case testNoReExportFromStubs] -from stub import Iterable # E: Module "stub" has no attribute "Iterable" -from stub import D # E: Module "stub" has no attribute "D" +from stub import Iterable # E: Module "stub" does not explicitly export attribute "Iterable" +from stub import D # E: Module "stub" does not explicitly export attribute "D" from stub import C c = C() reveal_type(c.x) # N: Revealed type is "builtins.int" it: Iterable[int] -reveal_type(it) # N: Revealed type is "Any" +reveal_type(it) # N: Revealed type is "typing.Iterable[builtins.int]" [file stub.pyi] from typing import Iterable @@ -1829,7 +1852,7 @@ class C: import stub reveal_type(stub.y) # N: Revealed type is "builtins.int" -reveal_type(stub.z) # E: Module has no attribute "z" \ +reveal_type(stub.z) # E: Module "stub" does not explicitly export attribute "z" \ # N: Revealed type is "Any" [file stub.pyi] @@ -1880,7 +1903,7 @@ class C: from 
util import mod reveal_type(mod) # N: Revealed type is "def () -> package.mod.mod" -from util import internal_detail # E: Module "util" has no attribute "internal_detail" +from util import internal_detail # E: Module "util" does not explicitly export attribute "internal_detail" [file package/__init__.pyi] from .mod import mod as mod @@ -1895,7 +1918,7 @@ from package import mod as internal_detail [builtins fixtures/module.pyi] [case testNoReExportUnrelatedModule] -from mod2 import unrelated # E: Module "mod2" has no attribute "unrelated" +from mod2 import unrelated # E: Module "mod2" does not explicitly export attribute "unrelated" [file mod1/__init__.pyi] [file mod1/unrelated.pyi] @@ -1906,7 +1929,7 @@ from mod1 import unrelated [builtins fixtures/module.pyi] [case testNoReExportUnrelatedSiblingPrefix] -from pkg.unrel import unrelated # E: Module "pkg.unrel" has no attribute "unrelated" +from pkg.unrel import unrelated # E: Module "pkg.unrel" does not explicitly export attribute "unrelated" [file pkg/__init__.pyi] [file pkg/unrelated.pyi] @@ -1918,10 +1941,10 @@ from pkg import unrelated [case testNoReExportChildStubs] import mod -from mod import C, D # E: Module "mod" has no attribute "C" +from mod import C, D # E: Module "mod" does not explicitly export attribute "C" reveal_type(mod.x) # N: Revealed type is "mod.submod.C" -mod.C # E: Module has no attribute "C" +mod.C # E: Module "mod" does not explicitly export attribute "C" y = mod.D() reveal_type(y.a) # N: Revealed type is "builtins.str" @@ -1936,7 +1959,7 @@ class D: [builtins fixtures/module.pyi] [case testNoReExportNestedStub] -from stub import substub # E: Module "stub" has no attribute "substub" +from stub import substub # E: Module "stub" does not explicitly export attribute "substub" [file stub.pyi] import substub @@ -2661,12 +2684,13 @@ from foo.bar import x x = 0 [case testClassicNotPackage] +# flags: --no-namespace-packages from foo.bar import x [file foo/bar.py] x = 0 [out] -main:1: error: Cannot find implementation or library stub for module named "foo.bar" -main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports +main:2: error: Cannot find implementation or library stub for module named "foo.bar" +main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports [case testNamespacePackage] # flags: --namespace-packages @@ -2847,7 +2871,7 @@ aaaaa: int [case testModuleAttributeThreeSuggestions] import m -m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aabaa", "aaaba", or "aaaab"? +m.aaaaa # E: Module has no attribute "aaaaa"; maybe "aaaab", "aaaba", or "aabaa"? [file m.py] aaaab: int @@ -2882,10 +2906,10 @@ CustomDict = TypedDict( [builtins fixtures/tuple.pyi] [case testNoReExportFromMissingStubs] -from stub import a # E: Module "stub" has no attribute "a" +from stub import a # E: Module "stub" does not explicitly export attribute "a" from stub import b from stub import c # E: Module "stub" has no attribute "c" -from stub import d # E: Module "stub" has no attribute "d" +from stub import d # E: Module "stub" does not explicitly export attribute "d" [file stub.pyi] from mystery import a, b as b, c as d @@ -2904,6 +2928,20 @@ from . 
import m as m [file p/m.py] [builtins fixtures/list.pyi] +[case testSpecialModulesNameImplicitAttr] +import typing +import builtins +import abc + +reveal_type(abc.__name__) # N: Revealed type is "builtins.str" +reveal_type(builtins.__name__) # N: Revealed type is "builtins.str" +reveal_type(typing.__name__) # N: Revealed type is "builtins.str" + +[case testSpecialAttrsAreAvaliableInClasses] +class Some: + name = __name__ +reveal_type(Some.name) # N: Revealed type is "builtins.str" + [case testReExportAllInStub] from m1 import C from m1 import D # E: Module "m1" has no attribute "D" @@ -3097,10 +3135,15 @@ from google.cloud import x [case testErrorFromGoogleCloud] import google.cloud from google.cloud import x +import google.non_existent +from google.non_existent import x [out] -main:1: error: Cannot find implementation or library stub for module named "google.cloud" +main:1: error: Library stubs not installed for "google.cloud" +main:1: note: Hint: "python3 -m pip install types-google-cloud-ndb" +main:1: note: (or run "mypy --install-types" to install all missing stub packages) main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports main:1: error: Cannot find implementation or library stub for module named "google" +main:3: error: Cannot find implementation or library stub for module named "google.non_existent" [case testMissingSubmoduleOfInstalledStubPackage] import bleach.xyz diff --git a/test-data/unit/check-namedtuple.test b/test-data/unit/check-namedtuple.test index e4f75f57280c..6b9f139f541c 100644 --- a/test-data/unit/check-namedtuple.test +++ b/test-data/unit/check-namedtuple.test @@ -561,6 +561,7 @@ A # E: Name "A" is not defined [builtins fixtures/tuple.pyi] [case testNamedTupleForwardAsUpperBound] +# flags: --disable-error-code=used-before-def from typing import NamedTuple, TypeVar, Generic T = TypeVar('T', bound='M') class G(Generic[T]): @@ -617,7 +618,7 @@ tmp/b.py:4: note: Revealed type is "Tuple[Any, fallback=a.N]" tmp/b.py:7: note: Revealed type is "Tuple[Any, fallback=a.N]" [case testSimpleSelfReferentialNamedTuple] - +# flags: --disable-recursive-aliases from typing import NamedTuple class MyNamedTuple(NamedTuple): parent: 'MyNamedTuple' # E: Cannot resolve name "MyNamedTuple" (possible cyclic definition) @@ -655,7 +656,7 @@ class B: [out] [case testSelfRefNT1] - +# flags: --disable-recursive-aliases from typing import Tuple, NamedTuple Node = NamedTuple('Node', [ @@ -667,7 +668,7 @@ reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, .. [builtins fixtures/tuple.pyi] [case testSelfRefNT2] - +# flags: --disable-recursive-aliases from typing import Tuple, NamedTuple A = NamedTuple('A', [ @@ -683,7 +684,7 @@ reveal_type(n) # N: Revealed type is "Tuple[builtins.str, builtins.tuple[Any, .. 
[builtins fixtures/tuple.pyi] [case testSelfRefNT3] - +# flags: --disable-recursive-aliases from typing import NamedTuple, Tuple class B(NamedTuple): @@ -703,7 +704,7 @@ reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.objec [builtins fixtures/tuple.pyi] [case testSelfRefNT4] - +# flags: --disable-recursive-aliases from typing import NamedTuple class B(NamedTuple): @@ -719,11 +720,11 @@ reveal_type(n.y[0]) # N: Revealed type is "Any" [builtins fixtures/tuple.pyi] [case testSelfRefNT5] - +# flags: --disable-recursive-aliases from typing import NamedTuple B = NamedTuple('B', [ - ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) + ('x', A), # E: Cannot resolve name "A" (possible cyclic definition) # E: Name "A" is used before definition ('y', int), ]) A = NamedTuple('A', [ @@ -737,7 +738,7 @@ reveal_type(f) # N: Revealed type is "def (m: Tuple[Any, builtins.int, fallback= [builtins fixtures/tuple.pyi] [case testRecursiveNamedTupleInBases] - +# flags: --disable-recursive-aliases from typing import List, NamedTuple, Union Exp = Union['A', 'B'] # E: Cannot resolve name "Exp" (possible cyclic definition) \ @@ -781,7 +782,7 @@ tp = NamedTuple('tp', [('x', int)]) [out] [case testSubclassOfRecursiveNamedTuple] - +# flags: --disable-recursive-aliases from typing import List, NamedTuple class Command(NamedTuple): @@ -904,6 +905,7 @@ if not b: [builtins fixtures/tuple.pyi] [case testNamedTupleDoubleForward] +# flags: --disable-error-code=used-before-def from typing import Union, Mapping, NamedTuple class MyBaseTuple(NamedTuple): @@ -1284,7 +1286,7 @@ from typing import NamedTuple, TypeVar T = TypeVar("T") NT = NamedTuple("NT", [("key", int), ("value", T)]) -reveal_type(NT) # N: Revealed type is "def [T] (key: builtins.int, value: T`-1) -> Tuple[builtins.int, T`-1, fallback=__main__.NT[T`-1]]" +reveal_type(NT) # N: Revealed type is "def [T] (key: builtins.int, value: T`1) -> Tuple[builtins.int, T`1, fallback=__main__.NT[T`1]]" nts: NT[str] reveal_type(nts) # N: Revealed type is "Tuple[builtins.int, builtins.str, fallback=__main__.NT[builtins.str]]" @@ -1306,3 +1308,32 @@ class C( [builtins fixtures/tuple.pyi] [typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleSelfItemNotAllowed] +from typing import Self, NamedTuple, Optional + +class NT(NamedTuple): + val: int + next: Optional[Self] # E: Self type cannot be used in NamedTuple item type +NTC = NamedTuple("NTC", [("val", int), ("next", Optional[Self])]) # E: Self type cannot be used in NamedTuple item type +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] + +[case testNamedTupleTypingSelfMethod] +from typing import Self, NamedTuple, TypeVar, Generic + +T = TypeVar("T") +class NT(NamedTuple, Generic[T]): + key: str + val: T + def meth(self) -> Self: + nt: NT[int] + if bool(): + return nt._replace() # E: Incompatible return value type (got "NT[int]", expected "Self") + else: + return self._replace() + +class SNT(NT[int]): ... 
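The new testNamedTupleTypingSelfMethod case above lets `Self` be used in NamedTuple methods, with `self._replace()` keeping the subclass type. A rough sketch of the same idea under assumed names (Point/NamedPoint are illustrative; typing_extensions is assumed to be installed):

```python
# Rough sketch of Self on a NamedTuple method; Point/NamedPoint are made-up names.
from typing import NamedTuple
from typing_extensions import Self

class Point(NamedTuple):
    x: int
    y: int

    def shifted(self, dx: int) -> Self:
        # self._replace() preserves the precise (sub)class, satisfying Self.
        return self._replace(x=self.x + dx)

class NamedPoint(Point): ...

p = NamedPoint(1, 2).shifted(3)
print(p)  # mypy should treat p as NamedPoint (tuple fallback NamedPoint), not plain Point
```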
+reveal_type(SNT("test", 42).meth()) # N: Revealed type is "Tuple[builtins.str, builtins.int, fallback=__main__.SNT]" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-namedtuple.pyi] diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index 30a41ef86d55..f05e2aaf5c19 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -436,8 +436,8 @@ else: weird_mixture: Union[KeyedTypedDict, KeyedNamedTuple] if weird_mixture["key"] is Key.B: # E: No overload variant of "__getitem__" of "tuple" matches argument type "str" \ # N: Possible overload variants: \ - # N: def __getitem__(self, int) -> Literal[Key.C] \ - # N: def __getitem__(self, slice) -> Tuple[Literal[Key.C], ...] + # N: def __getitem__(self, int, /) -> Literal[Key.C] \ + # N: def __getitem__(self, slice, /) -> Tuple[Literal[Key.C], ...] reveal_type(weird_mixture) # N: Revealed type is "Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]" else: reveal_type(weird_mixture) # N: Revealed type is "Union[TypedDict('__main__.KeyedTypedDict', {'key': Literal[__main__.Key.B]}), Tuple[Literal[__main__.Key.C], fallback=__main__.KeyedNamedTuple]]" @@ -1249,3 +1249,14 @@ def two_type_vars(x: Union[str, Dict[str, int], Dict[bool, object], int]) -> Non else: reveal_type(x) # N: Revealed type is "builtins.int" [builtins fixtures/dict.pyi] + + +[case testNarrowingWithDef] +from typing import Callable, Optional + +def g() -> None: + foo: Optional[Callable[[], None]] = None + if foo is None: + def foo(): ... + foo() +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-native-int.test b/test-data/unit/check-native-int.test index 14bea5d715c3..1e945d0af27d 100644 --- a/test-data/unit/check-native-int.test +++ b/test-data/unit/check-native-int.test @@ -120,8 +120,10 @@ reveal_type(x) # N: Revealed type is "mypy_extensions.i32" y = 1 if int(): + # We don't narrow an int down to i32, since they have different + # representations. 
y = i32(1) - reveal_type(y) # N: Revealed type is "mypy_extensions.i32" + reveal_type(y) # N: Revealed type is "builtins.int" reveal_type(y) # N: Revealed type is "builtins.int" [builtins fixtures/dict.pyi] @@ -149,3 +151,80 @@ def fi32(x: i32) -> None: pass reveal_type(meet(ff, fi32)) # N: Revealed type is "" reveal_type(meet(fi32, ff)) # N: Revealed type is "" [builtins fixtures/dict.pyi] + +[case testNativeIntForLoopRange] +from mypy_extensions import i64, i32 + +for a in range(i64(5)): + reveal_type(a) # N: Revealed type is "mypy_extensions.i64" + +for b in range(0, i32(5)): + reveal_type(b) # N: Revealed type is "mypy_extensions.i32" + +for c in range(i64(0), 5): + reveal_type(c) # N: Revealed type is "mypy_extensions.i64" + +for d in range(i64(0), i64(5)): + reveal_type(d) # N: Revealed type is "mypy_extensions.i64" + +for e in range(i64(0), i32(5)): + reveal_type(e) # N: Revealed type is "builtins.int" + +for f in range(0, i64(3), 2): + reveal_type(f) # N: Revealed type is "mypy_extensions.i64" + +n = 5 +for g in range(0, n, i64(2)): + reveal_type(g) # N: Revealed type is "mypy_extensions.i64" +[builtins fixtures/primitives.pyi] + +[case testNativeIntComprehensionRange] +from mypy_extensions import i64, i32 + +reveal_type([a for a in range(i64(5))]) # N: Revealed type is "builtins.list[mypy_extensions.i64]" +[reveal_type(a) for a in range(0, i32(5))] # N: Revealed type is "mypy_extensions.i32" +[builtins fixtures/primitives.pyi] + +[case testNativeIntNarrowing] +from typing import Union +from mypy_extensions import i64, i32 + +def narrow_i64(x: Union[str, i64]) -> None: + if isinstance(x, i64): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i64" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i64]" + +def narrow_i32(x: Union[str, i32]) -> None: + if isinstance(x, i32): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, str): + reveal_type(x) # N: Revealed type is "builtins.str" + else: + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "mypy_extensions.i32" + else: + reveal_type(x) # N: Revealed type is "builtins.str" + reveal_type(x) # N: Revealed type is "Union[builtins.str, mypy_extensions.i32]" + +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-newsemanal.test b/test-data/unit/check-newsemanal.test index bf612f95b3a2..99f4141a4d64 100644 --- a/test-data/unit/check-newsemanal.test +++ b/test-data/unit/check-newsemanal.test @@ -434,6 +434,7 @@ def main() -> None: x # E: Name "x" is not defined [case testNewAnalyzerCyclicDefinitions] +# flags: --disable-recursive-aliases --disable-error-code used-before-def gx = gy # E: Cannot resolve name "gy" (possible cyclic definition) gy = gx 
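The new check-native-int cases above make `range()` produce native-int loop variables and let `isinstance()` narrow unions involving i64/i32. A hedged sketch, assuming mypy_extensions is installed and that its `i64` supports `isinstance` at runtime (as the new narrowing tests rely on):

```python
# Hedged sketch of native-int inference; assumes mypy_extensions is installed
# and that i64 works with isinstance at runtime.
from typing import Union
from mypy_extensions import i64

def sum_to(n: i64) -> i64:
    total: i64 = 0
    for a in range(n):      # per the new tests, "a" is inferred as i64
        total += a
    return total

def describe(x: Union[str, i64]) -> str:
    if isinstance(x, i64):  # narrows x to i64 in this branch
        return "native int"
    return x                # and to str here

print(sum_to(i64(5)), describe("hi"))
```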
def main() -> None: @@ -520,12 +521,6 @@ reveal_type(b.x) # N: Revealed type is "builtins.int" reveal_type(b.f()) # N: Revealed type is "builtins.str" [case testNewAnalyzerNestedClass2] -b: A.B -b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" -reveal_type(b) # N: Revealed type is "__main__.A.B" -reveal_type(b.x) # N: Revealed type is "builtins.int" -reveal_type(b.f()) # N: Revealed type is "builtins.str" - class A: class B: x: int @@ -536,17 +531,14 @@ class A: def f(self) -> str: return self.x # E: Incompatible return value type (got "int", expected "str") +b: A.B +b = A.B('') # E: Argument 1 to "B" has incompatible type "str"; expected "int" +reveal_type(b) # N: Revealed type is "__main__.A.B" +reveal_type(b.x) # N: Revealed type is "builtins.int" +reveal_type(b.f()) # N: Revealed type is "builtins.str" [case testNewAnalyzerGenerics] from typing import TypeVar, Generic -c: C[int] -c2: C[int, str] # E: "C" expects 1 type argument, but 2 given -c3: C -c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" -reveal_type(c.get()) # N: Revealed type is "builtins.int" -reveal_type(c2) # N: Revealed type is "__main__.C[Any]" -reveal_type(c3) # N: Revealed type is "__main__.C[Any]" - T = TypeVar('T') class C(Generic[T]): @@ -556,6 +548,13 @@ class C(Generic[T]): def get(self) -> T: return self.x +c: C[int] +c2: C[int, str] # E: "C" expects 1 type argument, but 2 given +c3: C +c = C('') # E: Argument 1 to "C" has incompatible type "str"; expected "int" +reveal_type(c.get()) # N: Revealed type is "builtins.int" +reveal_type(c2) # N: Revealed type is "__main__.C[Any]" +reveal_type(c3) # N: Revealed type is "__main__.C[Any]" [case testNewAnalyzerGenericsTypeVarForwardRef] from typing import TypeVar, Generic @@ -576,6 +575,12 @@ reveal_type(c.get()) # N: Revealed type is "builtins.int" [case testNewAnalyzerTypeAlias] from typing import Union, TypeVar, Generic +T = TypeVar('T') +S = TypeVar('S') +class D(Generic[T, S]): pass + +class C: pass + C2 = C U = Union[C, int] G = D[T, C] @@ -586,13 +591,6 @@ u: U reveal_type(u) # N: Revealed type is "Union[__main__.C, builtins.int]" g: G[int] reveal_type(g) # N: Revealed type is "__main__.D[builtins.int, __main__.C]" - -class C: pass - -T = TypeVar('T') -S = TypeVar('S') -class D(Generic[T, S]): pass - [case testNewAnalyzerTypeAlias2] from typing import Union @@ -677,13 +675,14 @@ a.f(1.0) # E: No overload variant of "f" of "A" matches argument type "float" \ # N: def f(self, x: str) -> str [case testNewAnalyzerPromotion] +def f(x: float) -> None: pass y: int f(y) f(1) -def f(x: float) -> None: pass [builtins fixtures/primitives.pyi] [case testNewAnalyzerFunctionDecorator] +# flags: --disable-error-code used-before-def from typing import Callable @dec @@ -701,6 +700,7 @@ reveal_type(f1('')) # N: Revealed type is "builtins.str" f2(1) # E: Argument 1 to "f2" has incompatible type "int"; expected "str" [case testNewAnalyzerTypeVarForwardReference] +# flags: --disable-error-code used-before-def from typing import TypeVar, Generic T = TypeVar('T') @@ -720,7 +720,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -736,7 +736,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -XY = TypeVar('XY', X, Y) +XY = TypeVar('XY', 'X', 'Y') class C(Generic[T]): pass @@ -754,7 +754,7 @@ y: D[Y] from typing import TypeVar, Generic T = TypeVar('T') -TY = TypeVar('TY', bound=Y) +TY = TypeVar('TY', bound='Y') class 
C(Generic[T]): pass @@ -774,7 +774,7 @@ class C(Generic[T]): def func(x: U) -> U: ... U = TypeVar('U', asdf, asdf) # E: Name "asdf" is not defined -T = TypeVar('T', bound=asdf) # E: Name "asdf" is not defined +T = TypeVar('T', bound='asdf') # E: Name "asdf" is not defined reveal_type(C) # N: Revealed type is "def [T <: Any] (x: T`1) -> __main__.C[T`1]" reveal_type(func) # N: Revealed type is "def [U in (Any, Any)] (x: U`-1) -> U`-1" @@ -798,16 +798,16 @@ T = TypeVar('T') class A(Generic[T]): pass -a1: A[C] = C() -a2: A[D] = C() \ - # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") - class C(A[C]): pass -class D(A[D]): +class D(A['D']): pass +a1: A[C] = C() +a2: A[D] = C() \ + # E: Incompatible types in assignment (expression has type "C", variable has type "A[D]") + [case testNewAnalyzerTypeVarBoundForwardRef] from typing import TypeVar @@ -854,19 +854,17 @@ def f(): pass [case testNewAnalyzerNamedTupleCall] from typing import NamedTuple -o: Out -i: In +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) Out = NamedTuple('Out', [('x', In), ('y', Other)]) - +o: Out +i: In reveal_type(o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.y) # N: Revealed type is "__main__.Other" reveal_type(o.x.t) # N: Revealed type is "__main__.Other" reveal_type(i.t) # N: Revealed type is "__main__.Other" - -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClass] @@ -935,29 +933,23 @@ class C: [case testNewAnalyzerNamedTupleCallNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11], __main__.Other@12, fallback=__main__.C.Out@10]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@11]" - class C: def get_tuple(self) -> None: - self.o: Out - Out = NamedTuple('Out', [('x', In), ('y', Other)]) - In = NamedTuple('In', [('s', str), ('t', Other)]) + Out = NamedTuple('Out', [('x', 'In'), ('y', 'Other')]) + In = NamedTuple('In', [('s', str), ('t', 'Other')]) class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6], __main__.Other@7, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@7, fallback=__main__.C.In@6]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassNestedMethod] from typing import NamedTuple -c = C() -reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15], __main__.Other@18, fallback=__main__.C.Out@11]" -reveal_type(c.o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]" -reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@18, fallback=__main__.C.In@15]" - class C: def get_tuple(self) -> None: - self.o: Out class Out(NamedTuple): x: In y: Other @@ -966,6 +958,12 @@ class C: s: str t: Other class Other: pass + self.o: Out + +c = C() +reveal_type(c.o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9], __main__.Other@12, fallback=__main__.C.Out@5]" +reveal_type(c.o.x) # N: Revealed type 
is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" +reveal_type(c.o.method()) # N: Revealed type is "Tuple[builtins.str, __main__.Other@12, fallback=__main__.C.In@9]" [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleClassForwardMethod] @@ -987,34 +985,31 @@ class Other(NamedTuple): [case testNewAnalyzerNamedTupleSpecialMethods] from typing import NamedTuple +class Other: pass +In = NamedTuple('In', [('s', str), ('t', Other)]) +Out = NamedTuple('Out', [('x', In), ('y', Other)]) +class SubO(Out): pass + o: SubO reveal_type(SubO._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" reveal_type(o._replace(y=Other())) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.SubO]" - -class SubO(Out): pass - -Out = NamedTuple('Out', [('x', In), ('y', Other)]) -In = NamedTuple('In', [('s', str), ('t', Other)]) -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerNamedTupleBaseClass] from typing import NamedTuple +class Other: pass +class In(NamedTuple): + s: str + t: Other +class Out(NamedTuple('Out', [('x', In), ('y', Other)])): + pass o: Out reveal_type(o) # N: Revealed type is "Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" reveal_type(o.x) # N: Revealed type is "Tuple[builtins.str, __main__.Other, fallback=__main__.In]" reveal_type(o.x.t) # N: Revealed type is "__main__.Other" reveal_type(Out._make) # N: Revealed type is "def (iterable: typing.Iterable[Any], *, new: Any =, len: Any =) -> Tuple[Tuple[builtins.str, __main__.Other, fallback=__main__.In], __main__.Other, fallback=__main__.Out]" - -class Out(NamedTuple('Out', [('x', In), ('y', Other)])): - pass - -class In(NamedTuple): - s: str - t: Other -class Other: pass [builtins fixtures/tuple.pyi] [case testNewAnalyzerIncompleteRefShadowsBuiltin1] @@ -1133,7 +1128,11 @@ class B(type): reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclass2] -reveal_type(A.f()) # N: Revealed type is "builtins.int" +class B(type): + def f(cls) -> int: + return 0 + +class C: pass class A(metaclass=B): pass @@ -1141,12 +1140,7 @@ class A(metaclass=B): class AA(metaclass=C): # E: Metaclasses not inheriting from "type" are not supported pass -class B(type): - def f(cls) -> int: - return 0 - -class C: pass - +reveal_type(A.f()) # N: Revealed type is "builtins.int" [case testNewAnalyzerMetaclassPlaceholder] class B(C): pass @@ -1210,14 +1204,14 @@ class B(type): def f(cls) -> int: return 0 -reveal_type(A.f()) # N: Revealed type is "builtins.int" -reveal_type(A.x) # N: Revealed type is "builtins.str" - class A(six.with_metaclass(B, Defer)): pass class Defer: x: str + +reveal_type(A.f()) # N: Revealed type is "builtins.int" +reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture1] @@ -1251,6 +1245,7 @@ reveal_type(A.x) # N: Revealed type is "builtins.str" [builtins fixtures/tuple.pyi] [case testNewAnalyzerMetaclassFuture4] +# flags: --disable-error-code used-before-def import future.utils class B(type): @@ -1270,31 +1265,32 @@ class Defer: [case testNewAnalyzerFinalDefiningModuleVar] from typing import Final +class D(C): ... +class C: ... 
+ x: Final = C() y: Final[C] = D() bad: Final[D] = C() # E: Incompatible types in assignment (expression has type "C", variable has type "D") reveal_type(x) # N: Revealed type is "__main__.C" reveal_type(y) # N: Revealed type is "__main__.C" -class D(C): ... -class C: ... - [case testNewAnalyzerFinalDefiningInstanceVar] from typing import Final +class D: ... +class E(C): ... + class C: def __init__(self, x: D) -> None: self.x: Final = x self.y: Final[C] = E(D()) reveal_type(C(D()).x) # N: Revealed type is "__main__.D" reveal_type(C(D()).y) # N: Revealed type is "__main__.C" - -class D: ... -class E(C): ... - [case testNewAnalyzerFinalReassignModuleVar] from typing import Final +class A: ... + x: Final = A() x = A() # E: Cannot assign to final name "x" @@ -1307,8 +1303,6 @@ def f2() -> None: def g() -> None: f() -class A: ... - [case testNewAnalyzerFinalReassignModuleReexport] import a [file a.py] @@ -1381,6 +1375,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.A" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyClass3] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1460,13 +1455,13 @@ from typing import List, TypeVar, Union T = TypeVar('T') x: B[int] -B = A[List[T]] A = Union[int, T] +B = A[List[T]] class C(List[B[int]]): pass +y: C reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" reveal_type(y[0]) # N: Revealed type is "Union[builtins.int, builtins.list[builtins.int]]" -y: C [builtins fixtures/list.pyi] [case testNewAnalyzerForwardAliasFromUnion] @@ -1487,6 +1482,7 @@ class C: [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyTwoDeferrals] +# flags: --disable-error-code used-before-def from typing import List x: B @@ -1499,6 +1495,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBase] +# flags: --disable-recursive-aliases --disable-error-code used-before-def from typing import List x: B @@ -1509,13 +1506,14 @@ reveal_type(x) reveal_type(x[0][0]) [builtins fixtures/list.pyi] [out] -main:3: error: Cannot resolve name "B" (possible cyclic definition) main:4: error: Cannot resolve name "B" (possible cyclic definition) -main:4: error: Cannot resolve name "C" (possible cyclic definition) -main:7: note: Revealed type is "Any" +main:5: error: Cannot resolve name "B" (possible cyclic definition) +main:5: error: Cannot resolve name "C" (possible cyclic definition) main:8: note: Revealed type is "Any" +main:9: note: Revealed type is "Any" [case testNewAnalyzerAliasToNotReadyTwoDeferralsFunction] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1532,6 +1530,7 @@ reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l [builtins fixtures/list.pyi] [case testNewAnalyzerAliasToNotReadyDirectBaseFunction] +# flags: --disable-recursive-aliases --disable-error-code used-before-def import a [file a.py] from typing import List @@ -1555,11 +1554,11 @@ tmp/a.py:5: error: Cannot resolve name "C" (possible cyclic definition) from typing import List, Union x: A -A = Union[B, C] - class B(List[A]): pass class C(List[A]): pass +A = Union[B, C] + reveal_type(x) # N: Revealed type is "Union[__main__.B, __main__.C]" reveal_type(x[0]) # N: Revealed type is "Union[__main__.B, __main__.C]" [builtins fixtures/list.pyi] @@ -1575,19 +1574,18 @@ reveal_type(func()) # N: Revealed type is "builtins.list[Tuple[b.C, b.C]]" from typing import List, Tuple from a import 
func -B = List[Tuple[C, C]] - -class C(A): ... class A: ... +class C(A): ... +B = List[Tuple[C, C]] [builtins fixtures/list.pyi] [case testNewAnalyzerListComprehension] from typing import List +class A: pass +class B: pass a: List[A] a = [x for x in a] b: List[B] = [x for x in a] # E: List comprehension has incompatible type List[A]; expected List[B] -class A: pass -class B: pass [builtins fixtures/for.pyi] [case testNewAnalyzerDictionaryComprehension] @@ -1793,23 +1791,26 @@ a.y = 1 # E: Incompatible types in assignment (expression has type "int", varia [case testNewAnalyzerAliasesFixedFew] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... +A = List[C] +x: A def func(x: List[C[T]]) -> T: ... -x: A -A = List[C] reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... - -T = TypeVar('T') [builtins fixtures/list.pyi] [case testNewAnalyzerAliasesFixedMany] from typing import List, Generic, TypeVar +T = TypeVar('T') +class C(Generic[T]): + ... + def func(x: List[C[T]]) -> T: ... @@ -1819,9 +1820,7 @@ A = List[C[int, str]] # E: "C" expects 1 type argument, but 2 given reveal_type(x) # N: Revealed type is "builtins.list[__main__.C[Any]]" reveal_type(func(x)) # N: Revealed type is "Any" -class C(Generic[T]): - ... -T = TypeVar('T') + [builtins fixtures/list.pyi] [case testNewAnalyzerBuiltinAliasesFixed] @@ -1864,7 +1863,11 @@ if int(): elif bool(): def f(x: int) -> None: 1() # E: "int" not callable - def g(x: str) -> None: # E: All conditional function variants must have identical signatures + def g(x: str) -> None: # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def g(x: int) -> None \ + # N: Redefinition: \ + # N: def g(x: str) -> None pass else: def f(x: int) -> None: @@ -1881,7 +1884,12 @@ if int(): else: def f(x: A) -> None: 1() # E: "int" not callable - def g(x: str) -> None: # E: All conditional function variants must have identical signatures + def g(x: str) -> None: # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def g(x: A) -> None \ + # N: Redefinition: \ + # N: def g(x: str) -> None + pass reveal_type(g) # N: Revealed type is "def (x: __main__.A)" @@ -1961,7 +1969,7 @@ class A: pass class B: pass class C(B): pass -S = TypeVar('S', bound=Tuple[G[A], ...]) +S = TypeVar('S', bound='Tuple[G[A], ...]') class GG(Generic[S]): pass @@ -2048,12 +2056,12 @@ class C(Tuple[int, str]): class Meta(type): x = int() -y = C.x -reveal_type(y) # N: Revealed type is "builtins.int" - class C(metaclass=Meta): pass +y = C.x +reveal_type(y) # N: Revealed type is "builtins.int" + [case testNewAnalyzerFunctionError] def f(x: asdf) -> None: # E: Name "asdf" is not defined pass @@ -2077,7 +2085,7 @@ from typing import NewType, List x: C reveal_type(x[0]) # N: Revealed type is "__main__.C" -C = NewType('C', B) +C = NewType('C', 'B') class B(List[C]): pass @@ -2089,8 +2097,8 @@ from typing import NewType, List x: D reveal_type(x[0]) # N: Revealed type is "__main__.C" +C = NewType('C', 'B') D = C -C = NewType('C', B) class B(List[D]): pass @@ -2102,32 +2110,33 @@ from typing import NewType, List x: D reveal_type(x[0][0]) # N: Revealed type is "__main__.C" -D = C -C = NewType('C', List[B]) +D = C # E: Name "C" is used before definition +C = NewType('C', 'List[B]') class B(List[C]): pass [builtins fixtures/list.pyi] [case testNewAnalyzerNewTypeForwardClassAliasDirect] +# flags: 
--disable-recursive-aliases --disable-error-code used-before-def from typing import NewType, List x: D reveal_type(x[0][0]) D = List[C] -C = NewType('C', B) +C = NewType('C', 'B') class B(D): pass [builtins fixtures/list.pyi] [out] -main:3: error: Cannot resolve name "D" (possible cyclic definition) -main:4: note: Revealed type is "Any" -main:6: error: Cannot resolve name "D" (possible cyclic definition) -main:6: error: Cannot resolve name "C" (possible cyclic definition) -main:7: error: Argument 2 to NewType(...) must be a valid type -main:7: error: Cannot resolve name "B" (possible cyclic definition) +main:4: error: Cannot resolve name "D" (possible cyclic definition) +main:5: note: Revealed type is "Any" +main:7: error: Cannot resolve name "D" (possible cyclic definition) +main:7: error: Cannot resolve name "C" (possible cyclic definition) +main:8: error: Argument 2 to NewType(...) must be a valid type +main:8: error: Cannot resolve name "B" (possible cyclic definition) -- Copied from check-classes.test (tricky corner cases). [case testNewAnalyzerNoCrashForwardRefToBrokenDoubleNewTypeClass] @@ -2144,6 +2153,7 @@ class C: [builtins fixtures/dict.pyi] [case testNewAnalyzerForwardTypeAliasInBase] +# flags: --disable-recursive-aliases from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') @@ -2164,9 +2174,9 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [case testNewAnalyzerDuplicateTypeVar] from typing import TypeVar, Generic, Any -T = TypeVar('T', bound=B[Any]) +T = TypeVar('T', bound='B[Any]') # The "int" error is because of typing fixture. -T = TypeVar('T', bound=C) # E: Cannot redefine "T" as a type variable \ +T = TypeVar('T', bound='C') # E: Cannot redefine "T" as a type variable \ # E: Invalid assignment target \ # E: "int" not callable @@ -2179,6 +2189,7 @@ y: B[B[Any]] reveal_type(y.x) # N: Revealed type is "__main__.B[Any]" [case testNewAnalyzerDuplicateTypeVarImportCycle] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2206,6 +2217,7 @@ tmp/a.py:5: error: Invalid assignment target tmp/a.py:5: error: "int" not callable [case testNewAnalyzerDuplicateTypeVarImportCycleWithAliases] +# flags: --disable-error-code used-before-def import a [file a.py] from typing import TypeVar, Any @@ -2299,6 +2311,7 @@ C = NamedTuple('C', [('x', int)]) [builtins fixtures/tuple.pyi] [case testNewAnalyzerApplicationForward1] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[int]() @@ -2321,15 +2334,14 @@ class A: ... [case testNewAnalyzerApplicationForward3] from typing import Generic, TypeVar -x = C[A]() -reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" - +class A: ... T = TypeVar('T') class C(Generic[T]): ... - -class A: ... 
+x = C[A]() +reveal_type(x) # N: Revealed type is "__main__.C[__main__.A]" [case testNewAnalyzerApplicationForward4] +# flags: --disable-error-code used-before-def from typing import Generic, TypeVar x = C[A]() # E: Value of type variable "T" of "C" cannot be "A" @@ -2460,6 +2472,9 @@ else: y() # E: "str" not callable [case testNewAnalyzerFirstAliasTargetWins] +class DesiredTarget: + attr: int + if int(): Alias = DesiredTarget else: @@ -2469,12 +2484,8 @@ else: x: Alias reveal_type(x.attr) # N: Revealed type is "builtins.int" - -class DesiredTarget: - attr: int - [case testNewAnalyzerFirstVarDefinitionWins] -x = y +x = y # E: Name "y" is used before definition x = 1 # We want to check that the first definition creates the variable. @@ -2584,6 +2595,7 @@ import n def __getattr__(x): pass [case testNewAnalyzerReportLoopInMRO2] +# flags: --disable-recursive-aliases def f() -> None: class A(A): ... # E: Cannot resolve name "A" (possible cyclic definition) @@ -2948,6 +2960,7 @@ def g() -> None: reveal_type(y) # N: Revealed type is "__main__.G[Any]" [case testNewAnalyzerRedefinedNonlocal] +# flags: --disable-error-code=annotation-unchecked import typing def f(): @@ -2962,7 +2975,7 @@ def g() -> None: def foo() -> None: nonlocal bar - bar = [] # type: typing.List[int] # E: Name "bar" already defined on line 11 + bar = [] # type: typing.List[int] # E: Name "bar" already defined on line 12 [builtins fixtures/list.pyi] [case testNewAnalyzerMoreInvalidTypeVarArgumentsDeferred] diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index a0383a35c623..754c6b52ff19 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -127,6 +127,7 @@ def f(x: None) -> None: pass f(None) [case testInferOptionalFromDefaultNone] +# flags: --implicit-optional def f(x: int = None) -> None: x + 1 # E: Unsupported left operand type for + ("None") \ # N: Left operand is of type "Optional[int]" @@ -135,11 +136,14 @@ f(None) [case testNoInferOptionalFromDefaultNone] # flags: --no-implicit-optional -def f(x: int = None) -> None: # E: Incompatible default for argument "x" (default has type "None", argument has type "int") +def f(x: int = None) -> None: # E: Incompatible default for argument "x" (default has type "None", argument has type "int") \ + # N: PEP 484 prohibits implicit Optional. Accordingly, mypy has changed its default to no_implicit_optional=True \ + # N: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase pass [out] [case testInferOptionalFromDefaultNoneComment] +# flags: --implicit-optional def f(x=None): # type: (int) -> None x + 1 # E: Unsupported left operand type for + ("None") \ @@ -149,7 +153,9 @@ f(None) [case testNoInferOptionalFromDefaultNoneComment] # flags: --no-implicit-optional -def f(x=None): # E: Incompatible default for argument "x" (default has type "None", argument has type "int") +def f(x=None): # E: Incompatible default for argument "x" (default has type "None", argument has type "int") \ + # N: PEP 484 prohibits implicit Optional. 
Accordingly, mypy has changed its default to no_implicit_optional=True \ + # N: Use https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade your codebase # type: (int) -> None pass [out] @@ -396,58 +402,6 @@ reveal_type(None if bool() else 0) # N: Revealed type is "Union[Literal[0]?, No reveal_type([0, None, 0]) # N: Revealed type is "builtins.list[Union[builtins.int, None]]" [builtins fixtures/list.pyi] -[case testOptionalWhitelistSuppressesOptionalErrors] -# flags: --strict-optional-whitelist -import a -import b -[file a.py] -from typing import Optional -x = None # type: Optional[str] -x + "foo" - -[file b.py] -from typing import Optional -x = None # type: Optional[int] -x + 1 - -[builtins fixtures/primitives.pyi] - -[case testOptionalWhitelistPermitsOtherErrors] -# flags: --strict-optional-whitelist -import a -import b -[file a.py] -from typing import Optional -x = None # type: Optional[str] -x + "foo" - -[file b.py] -from typing import Optional -x = None # type: Optional[int] -x + 1 -1 + "foo" -[builtins fixtures/primitives.pyi] -[out] -tmp/b.py:4: error: Unsupported operand types for + ("int" and "str") - -[case testOptionalWhitelistPermitsWhitelistedFiles] -# flags: --strict-optional-whitelist **/a.py -import a -import b -[file a.py] -from typing import Optional -x = None # type: Optional[str] -x + "foo" - -[file b.py] -from typing import Optional -x = None # type: Optional[int] -x + 1 -[builtins fixtures/primitives.pyi] -[out] -tmp/a.py:3: error: Unsupported left operand type for + ("None") -tmp/a.py:3: note: Left operand is of type "Optional[str]" - [case testNoneContextInference] from typing import Dict, List def f() -> List[None]: @@ -1077,3 +1031,12 @@ def f1(b: bool) -> Optional[int]: class Defer: def __init__(self) -> None: self.defer = 10 + +[case testOptionalIterator] +# mypy: no-strict-optional +from typing import Optional, List + +x: Optional[List[int]] +if 3 in x: + pass + diff --git a/test-data/unit/check-overloading.test b/test-data/unit/check-overloading.test index 33ab1a8602be..4209f4ec9164 100644 --- a/test-data/unit/check-overloading.test +++ b/test-data/unit/check-overloading.test @@ -25,6 +25,9 @@ main:6: error: Name "f" already defined on line 2 [case testTypeCheckOverloadWithImplementation] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -35,14 +38,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypingExtensionsOverload] from typing import Any from typing_extensions import overload +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -53,13 +56,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadNeedsImplementation] from typing import overload, Any + +class A: pass +class B: pass + @overload # E: An overloaded function outside a stub file must have an implementation def f(x: 'A') -> 'B': ... @overload @@ -67,9 +71,6 @@ def f(x: 'B') -> 'A': ... 
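The check-optional changes above add `# flags: --implicit-optional` to cases that rely on the old default and extend the error for `def f(x: int = None)` with a note pointing at the no_implicit_optional migration tool. A minimal sketch of the new default behaviour (`greet` is an illustrative name):

```python
# Minimal sketch of the implicit-Optional default change; "greet" is made up.
from typing import Optional

def greet(name: Optional[str] = None) -> str:  # explicit Optional: accepted
    return f"hello {name or 'world'}"

# Without the annotation (and without --implicit-optional) mypy now reports:
#     def greet(name: str = None) -> str: ...
#     error: Incompatible default for argument "name" (default has type "None",
#            argument has type "str")
#     note: PEP 484 prohibits implicit Optional. ...
print(greet())
```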
reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testSingleOverloadNoImplementation] @@ -84,6 +85,9 @@ class B: pass [case testOverloadByAnyOtherName] from typing import overload as rose from typing import Any +class A: pass +class B: pass + @rose def f(x: 'A') -> 'B': ... @rose @@ -94,14 +98,14 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithDecoratedImplementation] from typing import overload, Any +class A: pass +class B: pass + def deco(fun): ... @overload @@ -115,9 +119,6 @@ def f(x: Any) -> Any: reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testOverloadDecoratedImplementationNotLast] @@ -174,6 +175,9 @@ class B: pass [case testTypeCheckOverloadWithImplementationError] from typing import overload, Any +class A: pass +class B: pass + @overload def f(x: 'A') -> 'B': ... @overload @@ -196,9 +200,6 @@ def g(x): reveal_type(f(A())) # N: Revealed type is "__main__.B" reveal_type(f(B())) # N: Revealed type is "__main__.A" - -class A: pass -class B: pass [builtins fixtures/isinstance.pyi] [case testTypeCheckOverloadWithUntypedImplAndMultipleVariants] @@ -906,8 +907,8 @@ B() < B() A() < object() # E: Unsupported operand types for < ("A" and "object") B() < object() # E: No overload variant of "__lt__" of "B" matches argument type "object" \ # N: Possible overload variants: \ - # N: def __lt__(self, B) -> int \ - # N: def __lt__(self, A) -> int + # N: def __lt__(self, B, /) -> int \ + # N: def __lt__(self, A, /) -> int [case testOverloadedForwardMethodAndCallingReverseMethod] from foo import * @@ -925,8 +926,8 @@ A() + 1 A() + B() A() + '' # E: No overload variant of "__add__" of "A" matches argument type "str" \ # N: Possible overload variants: \ - # N: def __add__(self, A) -> int \ - # N: def __add__(self, int) -> int + # N: def __add__(self, A, /) -> int \ + # N: def __add__(self, int, /) -> int [case testOverrideOverloadSwapped] from foo import * @@ -4738,12 +4739,12 @@ reveal_type(actually_b + Other()) # Note [out] main:12: error: Signature of "__add__" incompatible with supertype "A" main:12: note: Superclass: -main:12: note: def __add__(self, A) -> A +main:12: note: def __add__(self, A, /) -> A main:12: note: Subclass: main:12: note: @overload -main:12: note: def __add__(self, Other) -> B +main:12: note: def __add__(self, Other, /) -> B main:12: note: @overload -main:12: note: def __add__(self, A) -> A +main:12: note: def __add__(self, A, /) -> A main:12: note: Overloaded operator methods cannot have wider argument types in overrides main:32: note: Revealed type is "__main__.Other" @@ -5986,10 +5987,10 @@ reveal_type(f2(A())) # E: No overload variant of "f2" matches argument type "A" if True: @overload # E: Single overload definition, multiple required def f3(x: A) -> A: ... + def f3(x): ... if maybe_true: # E: Name "maybe_true" is not defined @overload # E: Single overload definition, multiple required def g3(x: B) -> B: ... - def f3(x): ... 
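The updated check-overloading expectations above render positional-only parameters with a trailing "/" in the "Possible overload variants" notes (e.g. `def __add__(self, A, /) -> A`). A short illustrative sketch with a made-up `Meters` class:

```python
# Illustrative sketch of the "/" rendering for operator overload variants;
# Meters is a made-up class.
from typing import Union, overload

class Meters:
    @overload
    def __add__(self, other: "Meters") -> "Meters": ...
    @overload
    def __add__(self, other: int) -> "Meters": ...
    def __add__(self, other: Union["Meters", int]) -> "Meters":
        return Meters()

Meters() + Meters()  # OK
Meters() + 1         # OK
# Meters() + "x"
#     error: No overload variant of "__add__" of "Meters" matches argument type "str"
#     note: Possible overload variants:
#     note:     def __add__(self, Meters, /) -> Meters
#     note:     def __add__(self, int, /) -> Meters
```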
reveal_type(f3(A())) # N: Revealed type is "__main__.A" if True: @@ -6467,3 +6468,80 @@ spam: Callable[..., str] = lambda x, y: 'baz' reveal_type(func(spam)) # N: Revealed type is "def (*Any, **Any) -> builtins.str" [builtins fixtures/paramspec.pyi] + +[case testGenericOverloadOverlapWithType] +import m + +[file m.pyi] +from typing import TypeVar, Type, overload, Callable + +T = TypeVar("T", bound=str) +@overload +def foo(x: Type[T] | int) -> int: ... +@overload +def foo(x: Callable[[int], bool]) -> str: ... + +[case testGenericOverloadOverlapWithCollection] +import m + +[file m.pyi] +from typing import TypeVar, Sequence, overload, List + +T = TypeVar("T", bound=str) + +@overload +def foo(x: List[T]) -> str: ... +@overload +def foo(x: Sequence[int]) -> int: ... +[builtins fixtures/list.pyi] + +# Also see `check-python38.test` for similar tests with `/` args: +[case testOverloadPositionalOnlyErrorMessageOldStyle] +from typing import overload + +@overload +def foo(__a: int): ... +@overload +def foo(a: str): ... +def foo(a): ... + +foo(a=1) +[out] +main:9: error: No overload variant of "foo" matches argument type "int" +main:9: note: Possible overload variants: +main:9: note: def foo(int, /) -> Any +main:9: note: def foo(a: str) -> Any + +[case testOverloadUnionGenericBounds] +from typing import overload, TypeVar, Sequence, Union + +class Entity: ... +class Assoc: ... + +E = TypeVar("E", bound=Entity) +A = TypeVar("A", bound=Assoc) + +class Test: + @overload + def foo(self, arg: Sequence[E]) -> None: ... + @overload + def foo(self, arg: Sequence[A]) -> None: ... + def foo(self, arg: Union[Sequence[E], Sequence[A]]) -> None: + ... + +[case testOverloadedStaticMethodOnInstance] +from typing import overload + +class Snafu(object): + @overload + @staticmethod + def snafu(value: bytes) -> bytes: ... + @overload + @staticmethod + def snafu(value: str) -> str: ... + @staticmethod + def snafu(value): + ... +reveal_type(Snafu().snafu('123')) # N: Revealed type is "builtins.str" +reveal_type(Snafu.snafu('123')) # N: Revealed type is "builtins.str" +[builtins fixtures/staticmethod.pyi] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 18192b38dc6c..56fc3b6faa14 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -429,7 +429,6 @@ class Z(Generic[P]): ... # literals can be applied n: Z[[int]] -# TODO: type aliases too nt1 = Z[[int]] nt2: TypeAlias = Z[[int]] @@ -506,8 +505,7 @@ def f2(x: X[int, Concatenate[int, P_2]]) -> str: ... # Accepted def f3(x: X[int, [int, bool]]) -> str: ... # Accepted # ellipsis only show up here, but I can assume it works like Callable[..., R] def f4(x: X[int, ...]) -> str: ... # Accepted -# TODO: this is not rejected: -# def f5(x: X[int, int]) -> str: ... # Rejected +def f5(x: X[int, int]) -> str: ... # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" # CASE 3 def bar(x: int, *args: bool) -> int: ... @@ -530,7 +528,7 @@ reveal_type(transform(bar)) # N: Revealed type is "def (builtins.str, *args: bui def expects_int_first(x: Callable[Concatenate[int, P], int]) -> None: ... @expects_int_first # E: Argument 1 to "expects_int_first" has incompatible type "Callable[[str], int]"; expected "Callable[[int], int]" \ - # N: This may be because "one" has arguments named: "x" + # N: This is likely because "one" has named arguments: "x". Consider marking them positional-only def one(x: str) -> int: ... 
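The reworded diagnostics above render positional-only parameters with a trailing `/` and suggest making callback parameters positional-only when they are matched against `Concatenate`. A minimal sketch of that suggestion, assuming `--strict-concatenate` and illustrative names (`takes_int_first`, `named`, `positional`) that are not part of the diff:

```python
from typing import Callable, TypeVar
from typing_extensions import Concatenate, ParamSpec

P = ParamSpec("P")
R = TypeVar("R")

def takes_int_first(f: Callable[Concatenate[int, P], R]) -> None: ...

def named(x: int) -> int:          # first parameter can also be passed as x=...
    return x

def positional(x: int, /) -> int:  # positional-only (Python 3.8+ syntax)
    return x

takes_int_first(named)       # under --strict-concatenate this draws a note like the one quoted above
takes_int_first(positional)  # accepted: matches Concatenate[int, P]
```

On older syntax, a double-underscore parameter name (e.g. `__x`) serves the same purpose, as the `...OldStyle` overload test above shows.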
@expects_int_first # E: Argument 1 to "expects_int_first" has incompatible type "Callable[[NamedArg(int, 'x')], int]"; expected "Callable[[int, NamedArg(int, 'x')], int]" @@ -573,7 +571,7 @@ reveal_type(f(n)) # N: Revealed type is "def (builtins.int, builtins.bytes) -> [builtins fixtures/paramspec.pyi] [case testParamSpecConcatenateNamedArgs] -# flags: --strict-concatenate +# flags: --python-version 3.8 --strict-concatenate # this is one noticeable deviation from PEP but I believe it is for the better from typing_extensions import ParamSpec, Concatenate from typing import Callable, TypeVar @@ -595,12 +593,14 @@ def f2(c: Callable[P, R]) -> Callable[Concatenate[int, P], R]: f2(lambda x: 42)(42, x=42) [builtins fixtures/paramspec.pyi] [out] -main:10: error: invalid syntax +main:10: error: invalid syntax; you likely need to run mypy using Python 3.8 or newer [out version>=3.8] main:17: error: Incompatible return value type (got "Callable[[Arg(int, 'x'), **P], R]", expected "Callable[[int, **P], R]") -main:17: note: This may be because "result" has arguments named: "x" +main:17: note: This is likely because "result" has named arguments: "x". Consider marking them positional-only [case testNonStrictParamSpecConcatenateNamedArgs] +# flags: --python-version 3.8 + # this is one noticeable deviation from PEP but I believe it is for the better from typing_extensions import ParamSpec, Concatenate from typing import Callable, TypeVar @@ -622,7 +622,7 @@ def f2(c: Callable[P, R]) -> Callable[Concatenate[int, P], R]: f2(lambda x: 42)(42, x=42) [builtins fixtures/paramspec.pyi] [out] -main:9: error: invalid syntax +main:11: error: invalid syntax; you likely need to run mypy using Python 3.8 or newer [out version>=3.8] [case testParamSpecConcatenateWithTypeVar] @@ -644,6 +644,8 @@ reveal_type(n(42)) # N: Revealed type is "None" [builtins fixtures/paramspec.pyi] [case testCallablesAsParameters] +# flags: --python-version 3.8 + # credits to https://github.com/microsoft/pyright/issues/2705 from typing_extensions import ParamSpec, Concatenate from typing import Generic, Callable, Any @@ -661,9 +663,9 @@ reveal_type(abc) bar(abc) [builtins fixtures/paramspec.pyi] [out] -main:11: error: invalid syntax +main:13: error: invalid syntax; you likely need to run mypy using Python 3.8 or newer [out version>=3.8] -main:14: note: Revealed type is "__main__.Foo[[builtins.int, b: builtins.str]]" +main:16: note: Revealed type is "__main__.Foo[[builtins.int, b: builtins.str]]" [case testSolveParamSpecWithSelfType] from typing_extensions import ParamSpec, Concatenate @@ -753,24 +755,24 @@ class C(Generic[P]): # think PhantomData from rust phantom: Optional[Callable[P, None]] - def add_str(self) -> C[Concatenate[int, P]]: - return C[Concatenate[int, P]]() - - def add_int(self) -> C[Concatenate[str, P]]: + def add_str(self) -> C[Concatenate[str, P]]: return C[Concatenate[str, P]]() + def add_int(self) -> C[Concatenate[int, P]]: + return C[Concatenate[int, P]]() + def f(c: C[P]) -> None: reveal_type(c) # N: Revealed type is "__main__.C[P`-1]" n1 = c.add_str() - reveal_type(n1) # N: Revealed type is "__main__.C[[builtins.int, **P`-1]]" + reveal_type(n1) # N: Revealed type is "__main__.C[[builtins.str, **P`-1]]" n2 = n1.add_int() - reveal_type(n2) # N: Revealed type is "__main__.C[[builtins.str, builtins.int, **P`-1]]" + reveal_type(n2) # N: Revealed type is "__main__.C[[builtins.int, builtins.str, **P`-1]]" p1 = c.add_int() - reveal_type(p1) # N: Revealed type is "__main__.C[[builtins.str, **P`-1]]" + reveal_type(p1) # N: 
Revealed type is "__main__.C[[builtins.int, **P`-1]]" p2 = p1.add_str() - reveal_type(p2) # N: Revealed type is "__main__.C[[builtins.int, builtins.str, **P`-1]]" + reveal_type(p2) # N: Revealed type is "__main__.C[[builtins.str, builtins.int, **P`-1]]" [builtins fixtures/paramspec.pyi] [case testParamSpecLiteralJoin] @@ -840,9 +842,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P, _R] (self: __main__.A, action: def (*_P.args, **_P.kwargs) -> _R`-2, *_P.args, **_P.kwargs) -> _R`-2" - -# TODO: _R` keeps flip-flopping between 5 (?), 13, 14, 15. Spooky. -# reveal_type(A().func) $ N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`13, *_P.args, **_P.kwargs) -> _R`13" +reveal_type(A().func) # N: Revealed type is "def [_P, _R] (action: def (*_P.args, **_P.kwargs) -> _R`5, *_P.args, **_P.kwargs) -> _R`5" def f(x: int) -> int: ... @@ -875,8 +875,7 @@ class A: ... reveal_type(A.func) # N: Revealed type is "def [_P] (self: __main__.A, action: __main__.Job[_P`-1, None]) -> __main__.Job[_P`-1, None]" -# TODO: flakey, _P`4 alternates around. -# reveal_type(A().func) $ N: Revealed type is "def [_P] (action: __main__.Job[_P`4, None]) -> __main__.Job[_P`4, None]" +reveal_type(A().func) # N: Revealed type is "def [_P] (action: __main__.Job[_P`3, None]) -> __main__.Job[_P`3, None]" reveal_type(A().func(Job(lambda x: x))) # N: Revealed type is "__main__.Job[[x: Any], None]" def f(x: int, y: int) -> None: ... @@ -1062,7 +1061,7 @@ def callback(func: Callable[[Any], Any]) -> None: ... class Job(Generic[P]): ... @callback -def run_job(job: Job[...]) -> T: ... # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def run_job(job: Job[...]) -> T: ... # E: A function returning TypeVar should receive at least one argument containing the same TypeVar [builtins fixtures/tuple.pyi] [case testTupleAndDictOperationsOnParamSpecArgsAndKwargs] @@ -1092,3 +1091,383 @@ def func(callback: Callable[P, str]) -> Callable[P, str]: return 'baz' return inner [builtins fixtures/paramspec.pyi] + +[case testUnpackingParamsSpecArgsAndKwargs] +from typing import Callable +from typing_extensions import ParamSpec + +P = ParamSpec("P") + +def func(callback: Callable[P, str]) -> Callable[P, str]: + def inner(*args: P.args, **kwargs: P.kwargs) -> str: + a, *b = args + reveal_type(a) # N: Revealed type is "builtins.object" + reveal_type(b) # N: Revealed type is "builtins.list[builtins.object]" + c, *d = kwargs + reveal_type(c) # N: Revealed type is "builtins.str" + reveal_type(d) # N: Revealed type is "builtins.list[builtins.str]" + e = {**kwargs} + reveal_type(e) # N: Revealed type is "builtins.dict[builtins.str, builtins.object]" + return "foo" + return inner +[builtins fixtures/paramspec.pyi] + +[case testParamSpecArgsAndKwargsMissmatch] +from typing import Callable +from typing_extensions import ParamSpec + +P1 = ParamSpec("P1") + +def func(callback: Callable[P1, str]) -> Callable[P1, str]: + def inner( + *args: P1.kwargs, # E: Use "P1.args" for variadic "*" parameter + **kwargs: P1.args, # E: Use "P1.kwargs" for variadic "**" parameter + ) -> str: + return "foo" + return inner +[builtins fixtures/paramspec.pyi] + +[case testParamSpecTestPropAccess] +from typing import Callable +from typing_extensions import ParamSpec + +P1 = ParamSpec("P1") + +def func1(callback: Callable[P1, str]) -> Callable[P1, str]: + def inner( + *args: P1.typo, # E: Use "P1.args" for variadic "*" parameter \ + # E: Name "P1.typo" is not defined + **kwargs: P1.kwargs, + ) -> 
str: + return "foo" + return inner + +def func2(callback: Callable[P1, str]) -> Callable[P1, str]: + def inner( + *args: P1.args, + **kwargs: P1.__bound__, # E: Use "P1.kwargs" for variadic "**" parameter \ + # E: Name "P1.__bound__" is not defined + ) -> str: + return "foo" + return inner + +def func3(callback: Callable[P1, str]) -> Callable[P1, str]: + def inner( + *args: P1.__bound__, # E: Use "P1.args" for variadic "*" parameter \ + # E: Name "P1.__bound__" is not defined + **kwargs: P1.invalid, # E: Use "P1.kwargs" for variadic "**" parameter \ + # E: Name "P1.invalid" is not defined + ) -> str: + return "foo" + return inner +[builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec + +P = ParamSpec('P') + +def c1(f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[P, int]) -> int: ... +def c3(f: Callable[P, int], *args, **kwargs) -> int: ... + +# It is ok to define, +def c4(f: Callable[P, int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(*args, **kwargs) # E: Argument 1 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 2 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[P, int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[P, int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[P, int], *args: P.args) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[P, int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[P, int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" + +# Error message test: +P1 = ParamSpec('P1') + +def m1(f: Callable[P1, int], *a, **k: P1.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testInvalidParamSpecAndConcatenateDefinitionsWithArgsKwargs] +from typing import Callable, ParamSpec +from typing_extensions import Concatenate + +P = ParamSpec('P') + +def c1(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs: P.kwargs) -> int: ... +def c2(f: Callable[Concatenate[int, P], int]) -> int: ... +def c3(f: Callable[Concatenate[int, P], int], *args, **kwargs) -> int: ... + +# It is ok to define, +def c4(f: Callable[Concatenate[int, P], int], *args: int, **kwargs: str) -> int: + # but not ok to call: + f(1, *args, **kwargs) # E: Argument 2 has incompatible type "*Tuple[int, ...]"; expected "P.args" \ + # E: Argument 3 has incompatible type "**Dict[str, str]"; expected "P.kwargs" + return 1 + +def f1(f: Callable[Concatenate[int, P], int], *args, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f2(f: Callable[Concatenate[int, P], int], *args: P.args, **kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f3(f: Callable[Concatenate[int, P], int], *args: P.args) -> int: ... 
# E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f4(f: Callable[Concatenate[int, P], int], **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +def f5(f: Callable[Concatenate[int, P], int], *args: P.args, extra_keyword_arg: int, **kwargs: P.kwargs) -> int: ... # E: ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testValidParamSpecInsideGenericWithoutArgsAndKwargs] +from typing import Callable, ParamSpec, Generic +from typing_extensions import Concatenate + +P = ParamSpec('P') + +class Some(Generic[P]): ... + +def create(s: Some[P], *args: int): ... +def update(s: Some[P], **kwargs: int): ... +def delete(s: Some[P]): ... + +def from_callable1(c: Callable[P, int], *args: int, **kwargs: int) -> Some[P]: ... +def from_callable2(c: Callable[P, int], **kwargs: int) -> Some[P]: ... +def from_callable3(c: Callable[P, int], *args: int) -> Some[P]: ... + +def from_extra1(c: Callable[Concatenate[int, P], int], *args: int, **kwargs: int) -> Some[P]: ... +def from_extra2(c: Callable[Concatenate[int, P], int], **kwargs: int) -> Some[P]: ... +def from_extra3(c: Callable[Concatenate[int, P], int], *args: int) -> Some[P]: ... +[builtins fixtures/paramspec.pyi] + + +[case testUnboundParamSpec] +from typing import Callable, ParamSpec + +P1 = ParamSpec('P1') +P2 = ParamSpec('P2') + +def f0(f: Callable[P1, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +def f1(*args: P1.args): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f2(**kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f3(*args: P1.args, **kwargs: int): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f4(*args: int, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Error message is based on the `args` definition: +def f5(*args: P2.args, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +def f6(*args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" + +# Multiple `ParamSpec` variables can be found, they should not affect error message: +P3 = ParamSpec('P3') + +def f7(first: Callable[P3, int], *args: P1.args, **kwargs: P2.kwargs): ... # E: ParamSpec must have "*args" typed as "P1.args" and "**kwargs" typed as "P1.kwargs" +def f8(first: Callable[P3, int], *args: P2.args, **kwargs: P1.kwargs): ... # E: ParamSpec must have "*args" typed as "P2.args" and "**kwargs" typed as "P2.kwargs" +[builtins fixtures/paramspec.pyi] + + +[case testArgsKwargsWithoutParamSpecVar] +from typing import Generic, Callable, ParamSpec + +P = ParamSpec('P') + +# This must be allowed: +class Some(Generic[P]): + def call(self, *args: P.args, **kwargs: P.kwargs): ... + +# TODO: this probably should be reported. +def call(*args: P.args, **kwargs: P.kwargs): ... +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInferenceCrash] +from typing import Callable, Generic, ParamSpec, TypeVar + +def foo(x: int) -> int: ... +T = TypeVar("T") +def bar(x: T) -> T: ... 
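The `testInvalidParamSpec...` and `testUnboundParamSpec` cases above pin down the new rule that `P.args` and `P.kwargs` must be used together and must come from the same ParamSpec. A small sketch under that rule, with illustrative function names:

```python
from typing import Callable
from typing_extensions import ParamSpec

P = ParamSpec("P")

# OK: *args/**kwargs are annotated with the same ParamSpec's components.
def ok(f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> int:
    return f(*args, **kwargs)

# Flagged by the new check: P.args used without the matching P.kwargs
# ('ParamSpec must have "*args" typed as "P.args" and "**kwargs" typed as "P.kwargs"').
def half(f: Callable[P, int], *args: P.args) -> int:
    ...
```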
+ +P = ParamSpec("P") + +class C(Generic[P]): + def __init__(self, fn: Callable[P, int], *args: P.args, **kwargs: P.kwargs): ... + +reveal_type(bar(C(fn=foo, x=1))) # N: Revealed type is "__main__.C[[x: builtins.int]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecClassConstructor] +# flags: --strict-optional +from typing import ParamSpec, Callable + +P = ParamSpec("P") + +class SomeClass: + def __init__(self, a: str) -> None: + pass + +def func(t: Callable[P, SomeClass], val: Callable[P, SomeClass]) -> None: + pass + +def constructor(a: str) -> SomeClass: + return SomeClass(a) + +def wrong_constructor(a: bool) -> SomeClass: + return SomeClass("a") + +func(SomeClass, constructor) +func(SomeClass, wrong_constructor) # E: Argument 1 to "func" has incompatible type "Type[SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" \ + # E: Argument 2 to "func" has incompatible type "Callable[[bool], SomeClass]"; expected "Callable[[VarArg(), KwArg()], SomeClass]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasBasic] +from typing import ParamSpec, Callable + +P = ParamSpec("P") +C = Callable[P, int] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" +def foo(x: int) -> str: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasConcatenate] +from typing import ParamSpec, Callable +from typing_extensions import Concatenate + +P = ParamSpec("P") +C = Callable[Concatenate[int, P], int] +def f(n: C[P]) -> C[P]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[], int]"; expected "Callable[[int], int]" +def bad() -> int: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(x: int, y: str) -> int: ... +reveal_type(bar2) # N: Revealed type is "def (builtins.int, y: builtins.str) -> builtins.int" + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "Callable[[int], int]" \ + # N: This is likely because "foo" has named arguments: "x". Consider marking them positional-only +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[str, int], int]"; expected "Callable[[int, int], int]" \ + # N: This is likely because "foo2" has named arguments: "x". Consider marking them positional-only +def foo2(x: str, y: int) -> int: ... + +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.int, builtins.str) -> builtins.int" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecInTypeAliasRecursive] +from typing import ParamSpec, Callable, Union + +P = ParamSpec("P") +C = Callable[P, Union[int, C[P]]] +def f(n: C[P]) -> C[P]: ... + +@f +def bar(x: int) -> int: ... + +@f +def bar2(__x: int) -> Callable[[int], int]: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], str]"; expected "C[[int]]" +def foo(x: int) -> str: ... + +@f # E: Argument 1 to "f" has incompatible type "Callable[[int], Callable[[int], str]]"; expected "C[[int]]" +def foo2(__x: int) -> Callable[[int], str]: ... 
+ +x: C[[int, str]] +reveal_type(x) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +y: C[int, str] +reveal_type(y) # N: Revealed type is "def (builtins.int, builtins.str) -> Union[builtins.int, ...]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInRuntimeContext] +from typing import ParamSpec, Generic + +P = ParamSpec("P") +class C(Generic[P]): ... + +c = C[int, str]() +reveal_type(c) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" + +A = C[P] +a = A[int, str]() +reveal_type(a) # N: Revealed type is "__main__.C[[builtins.int, builtins.str]]" +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasInvalidLocations] +from typing import ParamSpec, Generic, List, TypeVar, Callable + +P = ParamSpec("P") +T = TypeVar("T") +A = List[T] +def f(x: A[[int, str]]) -> None: ... # E: Bracketed expression "[...]" is not valid as a type \ + # N: Did you mean "List[...]"? +def g(x: A[P]) -> None: ... # E: Invalid location for ParamSpec "P" \ + # N: You can use ParamSpec as the first argument to Callable, e.g., 'Callable[P, int]' + +C = Callable[P, T] +x: C[int] # E: Bad number of arguments for type alias, expected: 2, given: 1 +y: C[int, str] # E: Can only replace ParamSpec with a parameter types list or another ParamSpec, got "int" +z: C[int, str, bytes] # E: Bad number of arguments for type alias, expected: 2, given: 3 +[builtins fixtures/paramspec.pyi] + +[case testTrivialParametersHandledCorrectly] +from typing import ParamSpec, Generic, TypeVar, Callable, Any +from typing_extensions import Concatenate + +P = ParamSpec("P") +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[S, P, T]): ... + +def foo(f: Callable[P, int]) -> None: + x: C[Any, ..., Any] + x1: C[int, Concatenate[int, str, P], str] + x = x1 # OK +[builtins fixtures/paramspec.pyi] + +[case testParamSpecAliasNested] +from typing import ParamSpec, Callable, List, TypeVar, Generic +from typing_extensions import Concatenate + +P = ParamSpec("P") +A = List[Callable[P, None]] +B = List[Callable[Concatenate[int, P], None]] + +fs: A[int, str] +reveal_type(fs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.str)]" +gs: B[int, str] +reveal_type(gs) # N: Revealed type is "builtins.list[def (builtins.int, builtins.int, builtins.str)]" + +T = TypeVar("T") +class C(Generic[T]): ... +C[Callable[P, int]]() # E: The first argument to Callable must be a list of types, parameter specification, or "..." \ + # N: See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas +[builtins fixtures/paramspec.pyi] + +[case testConcatDeferralNoCrash] +from typing import Callable, TypeVar +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") +T = TypeVar("T", bound="Defer") + +Alias = Callable[P, bool] +Concat = Alias[Concatenate[T, P]] + +def test(f: Concat[T, ...]) -> None: ... + +class Defer: ... +[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/check-possibly-undefined.test b/test-data/unit/check-possibly-undefined.test new file mode 100644 index 000000000000..29c4868e97af --- /dev/null +++ b/test-data/unit/check-possibly-undefined.test @@ -0,0 +1,970 @@ +[case testDefinedInOneBranch] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +else: + x = 2 +z = a + 1 # E: Name "a" may be undefined +z = a + 1 # We only report the error on first occurrence. 
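The new `check-possibly-undefined.test` file (continuing below) exercises the opt-in `possibly-undefined` error code. A minimal sketch of the behaviour, assuming the flag spelling used in the tests (`--enable-error-code possibly-undefined`) and an illustrative function name:

```python
# mypy --enable-error-code possibly-undefined example.py
def report(flag: bool) -> int:
    if flag:
        result = 1
    # "result" is unbound when flag is False, so the read below is flagged
    return result  # error: Name "result" may be undefined  [possibly-undefined]
```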
+ +[case testElif] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +elif int(): + a = 2 +else: + x = 3 + +z = a + 1 # E: Name "a" may be undefined + +[case testUsedInIf] +# flags: --enable-error-code possibly-undefined +if int(): + y = 1 +if int(): + x = y # E: Name "y" may be undefined + +[case testDefinedInAllBranches] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +elif int(): + a = 2 +else: + a = 3 +z = a + 1 + +[case testOmittedElse] +# flags: --enable-error-code possibly-undefined +if int(): + a = 1 +z = a + 1 # E: Name "a" may be undefined + +[case testUpdatedInIf] +# flags: --enable-error-code possibly-undefined +# Variable a is already defined. Just updating it in an "if" is acceptable. +a = 1 +if int(): + a = 2 +z = a + 1 + +[case testNestedIf] +# flags: --enable-error-code possibly-undefined +if int(): + if int(): + a = 1 + x = 1 + x = x + 1 + else: + a = 2 + b = a + x # E: Name "x" may be undefined + b = b + 1 +else: + b = 2 +z = a + b # E: Name "a" may be undefined + +[case testVeryNestedIf] +# flags: --enable-error-code possibly-undefined +if int(): + if int(): + if int(): + a = 1 + else: + a = 2 + x = a + else: + a = 2 + b = a +else: + b = 2 +z = a + b # E: Name "a" may be undefined + +[case testTupleUnpack] +# flags: --enable-error-code possibly-undefined + +if int(): + (x, y) = (1, 2) +else: + [y, z] = [1, 2] +a = y + x # E: Name "x" may be undefined +a = y + z # E: Name "z" may be undefined + +[case testIndexExpr] +# flags: --enable-error-code possibly-undefined + +if int(): + *x, y = (1, 2) +else: + x = [1, 2] +a = x # No error. +b = y # E: Name "y" may be undefined + +[case testRedefined] +# flags: --enable-error-code possibly-undefined +y = 3 +if int(): + if int(): + y = 2 + x = y + 2 +else: + if int(): + y = 2 + x = y + 2 + +x = y + 2 + +[case testFunction] +# flags: --enable-error-code possibly-undefined +def f0() -> None: + if int(): + def some_func() -> None: + pass + + some_func() # E: Name "some_func" may be undefined + +def f1() -> None: + if int(): + def some_func() -> None: + pass + else: + def some_func() -> None: + pass + + some_func() # No error. + +[case testLambda] +# flags: --enable-error-code possibly-undefined +def f0(b: bool) -> None: + if b: + fn = lambda: 2 + y = fn # E: Name "fn" may be undefined + +[case testUsedBeforeDefClass] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f(x: A): # No error here. + pass +y = A() # E: Name "A" is used before definition +class A: pass + +[case testClassScope] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +class C: + x = 0 + def f0(self) -> None: pass + + def f2(self) -> None: + f0() # No error. + self.f0() # No error. 
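Several of the cases above and below also exercise the companion `used-before-def` code, which flags reads that occur before the name's lexically later binding in the same scope. A sketch, again with illustrative names:

```python
# mypy --enable-error-code used-before-def example.py
def compute() -> int:
    total = count  # error: Name "count" is used before definition  [used-before-def]
    count = 3
    return total + count
```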
+ +f0() # E: Name "f0" is used before definition +def f0() -> None: pass +y = x # E: Name "x" is used before definition +x = 1 + +[case testClassInsideFunction] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f() -> None: + class C: pass + +c = C() # E: Name "C" is used before definition +class C: pass + +[case testUsedBeforeDefFunc] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +foo() # E: Name "foo" is used before definition +def foo(): pass +[case testGenerator] +# flags: --enable-error-code possibly-undefined +if int(): + a = 3 +s = [a + 1 for a in [1, 2, 3]] +x = a # E: Name "a" may be undefined + +[case testScope] +# flags: --enable-error-code possibly-undefined +def foo() -> None: + if int(): + y = 2 + +if int(): + y = 3 +x = y # E: Name "y" may be undefined + +[case testVarDefinedInOuterScopeUpdated] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + global x + y = x + x = 1 # No error. + +x = 2 + +[case testNonlocalVar] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + x = 2 + + def inner() -> None: + nonlocal x + y = x + x = 1 # No error. + + +[case testGlobalDeclarationAfterUsage] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + y = x # E: Name "x" is used before definition + global x + x = 1 # No error. + +x = 2 +[case testVarDefinedInOuterScope] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f0() -> None: + global x + y = x # We do not detect such errors right now. + +f0() +x = 1 +[case testDefinedInOuterScopeNoError] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def foo() -> None: + bar() + +def bar() -> None: + foo() +[case testFuncParams] +# flags: --enable-error-code possibly-undefined +def foo(a: int) -> None: + if int(): + a = 2 + x = a + +[case testWhile] +# flags: --enable-error-code possibly-undefined +while int(): + a = 1 + +x = a # E: Name "a" may be undefined + +while int(): + b = 1 +else: + b = 2 + +y = b # No error. + +while True: + c = 1 + if int(): + break +y = c # No error. + +# This while loop doesn't have a `break` inside, so we know that the else must always get executed. +while int(): + pass +else: + d = 1 +y = d # No error. + +while int(): + if int(): + break +else: + e = 1 +# If a while loop has a `break`, it's possible that the else didn't get executed. +y = e # E: Name "e" may be undefined + +while int(): + while int(): + if int(): + break + else: + f = 1 +else: + g = 2 + +y = f # E: Name "f" may be undefined +y = g + +[case testForLoop] +# flags: --enable-error-code possibly-undefined +for x in [1, 2, 3]: + if x: + x = 1 + y = x +else: + z = 2 + +a = z + y # E: Name "y" may be undefined + +[case testReturn] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + return 0 + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + x = 2 + return x + +def f3() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + y = 2 + return x # E: Name "x" may be undefined + +def f4() -> int: + if int(): + x = 1 + elif int(): + return 0 + else: + return 0 + return x + +def f5() -> int: + # This is a test against crashes. 
+ if int(): + return 1 + if int(): + return 2 + else: + return 3 + return 1 + +def f6() -> int: + if int(): + x = 0 + return x + return x # E: Name "x" may be undefined + +[case testDefinedDifferentBranchUsedBeforeDef] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f0() -> None: + if int(): + x = 0 + else: + y = x # E: Name "x" is used before definition + z = x # E: Name "x" is used before definition + +def f1() -> None: + x = 1 + if int(): + x = 0 + else: + y = x # No error. + +def f2() -> None: + if int(): + x = 0 + elif int(): + y = x # E: Name "x" is used before definition + else: + y = x # E: Name "x" is used before definition + if int(): + z = x # E: Name "x" is used before definition + x = 1 + else: + x = 2 + w = x # No error. + +[case testPossiblyUndefinedLoop] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f0() -> None: + first_iter = True + for i in [0, 1]: + if first_iter: + first_iter = False + x = 0 + elif int(): + # This is technically a false positive but mypy isn't smart enough for this yet. + y = x # E: Name "x" may be undefined + else: + y = x # E: Name "x" may be undefined + if int(): + z = x # E: Name "x" may be undefined + x = 1 + else: + x = 2 + w = x # No error. + +def f1() -> None: + while True: + if int(): + x = 0 + else: + y = x # E: Name "x" may be undefined + z = x # E: Name "x" may be undefined + +def f2() -> None: + for i in [0, 1]: + x = i + else: + y = x # E: Name "x" may be undefined + +def f3() -> None: + while int(): + x = 1 + else: + y = x # E: Name "x" may be undefined + +def f4() -> None: + while int(): + y = x # E: Name "x" may be undefined + x: int = 1 + +[case testAssert] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + assert False, "something something" + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + assert False + else: + y = 2 + return x # E: Name "x" may be undefined + +[case testRaise] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + if int(): + x = 1 + else: + raise BaseException("something something") + return x + +def f2() -> int: + if int(): + x = 1 + elif int(): + raise BaseException("something something") + else: + y = 2 + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testContinue] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + while int(): + if int(): + x = 1 + else: + continue + y = x + else: + x = 2 + return x + +def f2() -> int: + while int(): + if int(): + x = 1 + elif int(): + pass + else: + continue + y = x # E: Name "x" may be undefined + return x # E: Name "x" may be undefined + +def f3() -> None: + while True: + if int(): + x = 2 + elif int(): + continue + else: + continue + y = x + +[case testBreak] +# flags: --enable-error-code possibly-undefined +def f1() -> None: + while int(): + if int(): + x = 1 + else: + break + y = x # No error -- x is always defined. 
+ +def f2() -> None: + while int(): + if int(): + x = 1 + elif int(): + pass + else: + break + y = x # E: Name "x" may be undefined + +def f3() -> None: + while int(): + x = 1 + while int(): + if int(): + x = 2 + else: + break + y = x + z = x # E: Name "x" may be undefined + +[case testTryBasic] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + except: + pass + return x # E: Name "x" may be undefined + +def f2() -> int: + try: + pass + except: + x = 1 + return x # E: Name "x" may be undefined + +def f3() -> int: + try: + x = 1 + except: + y = x # E: Name "x" may be undefined + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + return 0 + return x + +def f5() -> int: + try: + x = 1 + except: + raise + return x + +def f6() -> None: + try: + pass + except BaseException as exc: + x = exc # No error. + exc = BaseException() + # This case is covered by the other check, not by possibly undefined check. + y = exc # E: Trying to read deleted variable "exc" + +def f7() -> int: + try: + if int(): + x = 1 + assert False + except: + pass + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryMultiExcept] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + x = 1 + except BaseException: + x = 2 + except: + x = 3 + return x + +def f2() -> int: + try: + x = 1 + except BaseException: + pass + except: + x = 3 + return x # E: Name "x" may be undefined +[builtins fixtures/exception.pyi] + +[case testTryFinally] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +def f1() -> int: + try: + x = 1 + finally: + x = 2 + return x + +def f2() -> int: + try: + pass + except: + pass + finally: + x = 2 + return x + +def f3() -> int: + try: + x = 1 + except: + pass + finally: + y = x # E: Name "x" may be undefined + return x + +def f4() -> int: + try: + x = 0 + except BaseException: + raise + finally: + y = x # E: Name "x" may be undefined + return y + +def f5() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + pass + return x # No error. + +def f6() -> int: + try: + if int(): + x = 1 + else: + return 0 + finally: + a = x # E: Name "x" may be undefined + return a +[builtins fixtures/exception.pyi] + +[case testTryElse] +# flags: --enable-error-code possibly-undefined +def f1() -> int: + try: + return 0 + except BaseException: + x = 1 + else: + x = 2 + finally: + y = x + return y + +def f2() -> int: + try: + pass + except: + x = 1 + else: + x = 2 + return x + +def f3() -> int: + try: + pass + except: + x = 1 + else: + pass + return x # E: Name "x" may be undefined + +def f4() -> int: + try: + x = 1 + except: + x = 2 + else: + pass + return x + +def f5() -> int: + try: + pass + except: + x = 1 + else: + return 1 + return x +[builtins fixtures/exception.pyi] + +[case testNoReturn] +# flags: --enable-error-code possibly-undefined + +from typing import NoReturn +def fail() -> NoReturn: + assert False + +def f() -> None: + if int(): + x = 1 + elif int(): + x = 2 + y = 3 + else: + # This has a NoReturn type, so we can skip it. 
+ fail() + z = y # E: Name "y" may be undefined + z = x + +[case testDictComprehension] +# flags: --enable-error-code possibly-undefined + +def f() -> None: + for _ in [1, 2]: + key = 2 + val = 2 + + x = ( + key, # E: Name "key" may be undefined + val, # E: Name "val" may be undefined + ) + + d = [(0, "a"), (1, "b")] + {val: key for key, val in d} +[builtins fixtures/dict.pyi] + +[case testWithStmt] +# flags: --enable-error-code possibly-undefined +from contextlib import contextmanager + +@contextmanager +def ctx(*args): + yield 1 + +def f() -> None: + if int(): + a = b = 1 + x = 1 + + with ctx() as a, ctx(a) as b, ctx(x) as x: # E: Name "x" may be undefined + c = a + c = b + d = a + d = b +[builtins fixtures/tuple.pyi] + +[case testUnreachable] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def +import typing + +def f0() -> None: + if typing.TYPE_CHECKING: + x = 1 + elif int(): + y = 1 + else: + y = 2 + a = x + +def f1() -> None: + if not typing.TYPE_CHECKING: + pass + else: + z = 1 + a = z + +def f2() -> None: + if typing.TYPE_CHECKING: + x = 1 + else: + y = x +[typing fixtures/typing-medium.pyi] + +[case testUsedBeforeDef] +# flags: --enable-error-code used-before-def + +def f0() -> None: + x = y # E: Name "y" is used before definition + y: int = 1 + +def f2() -> None: + if int(): + pass + else: + # No used-before-def error. + y = z # E: Name "z" is not defined + + def inner2() -> None: + z = 0 + +def f3() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + z: int = 2 + +def f4() -> None: + if int(): + pass + else: + y = z # E: Name "z" is used before definition + x = z # E: Name "z" is used before definition + z: int = 2 + +[case testUsedBeforeDefImportsBasic] +# flags: --enable-error-code used-before-def +import foo # type: ignore +import x.y # type: ignore + +def f0() -> None: + a = foo # No error. + foo: int = 1 + +def f1() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +def f2() -> None: + a = x # No error. + x: int = 1 + +def f3() -> None: + a = x.y # No error. + x: int = 1 + +[case testUsedBeforeDefImportBasicRename] +# flags: --enable-error-code used-before-def +import x.y as z # type: ignore +from typing import Any + +def f0() -> None: + a = z # No error. + z: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +def f2() -> None: + a = x.y # E: Name "x" is used before definition + x: Any = 1 + +def f3() -> None: + a = y # E: Name "y" is used before definition + y: int = 1 + +[case testUsedBeforeDefImportFrom] +# flags: --enable-error-code used-before-def +from foo import x # type: ignore + +def f0() -> None: + a = x # No error. + x: int = 1 + +[case testUsedBeforeDefImportFromRename] +# flags: --enable-error-code used-before-def +from foo import x as y # type: ignore + +def f0() -> None: + a = y # No error. + y: int = 1 + +def f1() -> None: + a = x # E: Name "x" is used before definition + x: int = 1 + +[case testUsedBeforeDefFunctionDeclarations] +# flags: --enable-error-code used-before-def + +def f0() -> None: + def inner() -> None: + pass + + inner() # No error. + inner = lambda: None + +[case testUsedBeforeDefBuiltins] +# flags: --enable-error-code used-before-def + +def f0() -> None: + s = type(123) + type = "abc" + a = type + +[case testUsedBeforeDefBuiltinsMultipass] +# flags: --enable-error-code used-before-def + +# When doing multiple passes, mypy resolves references slightly differently. 
+# In this case, it would refer the earlier `type` call to the range class defined below. +_type = type # No error +_C = C # E: Name "C" is used before definition +class type: pass +class C: pass + +[case testUsedBeforeDefImplicitModuleAttrs] +# flags: --enable-error-code used-before-def +a = __name__ # No error. +__name__ = "abc" + +[case testUntypedDef] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +def f(): + if int(): + x = 0 + z = y # No used-before-def error because def is untyped. + y = x # No possibly-undefined error because def is untyped. + +[case testUntypedDefCheckUntypedDefs] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def --check-untyped-defs + +def f(): + if int(): + x = 0 + z = y # E: Name "y" is used before definition + y: int = x # E: Name "x" may be undefined + +[case testClassBody] +# flags: --enable-error-code possibly-undefined --enable-error-code used-before-def + +class A: + # The following should not only trigger an error from semantic analyzer, but not the used-before-def check. + y = x + 1 # E: Name "x" is not defined + x = 0 + # Same as above but in a loop, which should trigger a possibly-undefined error. + for _ in [1, 2, 3]: + b = a + 1 # E: Name "a" is not defined + a = 0 + + +class B: + if int(): + x = 0 + else: + # This type of check is not caught by the semantic analyzer. If we ever update it to catch such issues, + # we should make sure that errors are not double-reported. + y = x # E: Name "x" is used before definition + for _ in [1, 2, 3]: + if int(): + a = 0 + else: + # Same as above but in a loop. + b = a # E: Name "a" may be undefined diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index 3dfa30273e6f..c787b34bf26b 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -794,7 +794,7 @@ main:18: note: def attr2(self) -> str [case testSelfTypesWithProtocolsBehaveAsWithNominal] from typing import Protocol, TypeVar -T = TypeVar('T', bound=Shape) +T = TypeVar('T', bound='Shape') class Shape(Protocol): def combine(self: T, other: T) -> T: pass @@ -1153,6 +1153,25 @@ x2 = y2 # E: Incompatible types in assignment (expression has type "PP", variabl # N: Protocol member P.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] +[case testClassVarProtocolImmutable] +from typing import Protocol, ClassVar + +class P(Protocol): + @property + def x(self) -> int: ... 
+ +class C: + x: ClassVar[int] + +class Bad: + x: ClassVar[str] + +x: P = C() +y: P = Bad() # E: Incompatible types in assignment (expression has type "Bad", variable has type "P") \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: x: expected "int", got "str" +[builtins fixtures/property.pyi] + [case testSettablePropertyInProtocols] from typing import Protocol @@ -1190,6 +1209,25 @@ z4 = y4 # E: Incompatible types in assignment (expression has type "PP", variabl # N: Protocol member PPS.attr expected settable variable, got read-only attribute [builtins fixtures/property.pyi] +[case testFinalAttributeProtocol] +from typing import Protocol, Final + +class P(Protocol): + x: int + +class C: + def __init__(self, x: int) -> None: + self.x = x +class CF: + def __init__(self, x: int) -> None: + self.x: Final = x + +x: P +y: P +x = C(42) +y = CF(42) # E: Incompatible types in assignment (expression has type "CF", variable has type "P") \ + # N: Protocol member P.x expected settable variable, got read-only attribute + [case testStaticAndClassMethodsInProtocols] from typing import Protocol, Type, TypeVar @@ -2623,6 +2661,53 @@ reveal_type([b, a]) # N: Revealed type is "builtins.list[def (x: def (__main__. [builtins fixtures/list.pyi] [out] +[case testCallbackProtocolFunctionAttributesSubtyping] +from typing import Protocol + +class A(Protocol): + __name__: str + def __call__(self) -> str: ... + +class B1(Protocol): + __name__: int + def __call__(self) -> str: ... + +class B2(Protocol): + __name__: str + def __call__(self) -> int: ... + +class B3(Protocol): + __name__: str + extra_stuff: int + def __call__(self) -> str: ... + +def f() -> str: ... + +reveal_type(f.__name__) # N: Revealed type is "builtins.str" +a: A = f # OK +b1: B1 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B1") \ + # N: Following member(s) of "function" have conflicts: \ + # N: __name__: expected "int", got "str" +b2: B2 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B2") \ + # N: "B2.__call__" has type "Callable[[], int]" +b3: B3 = f # E: Incompatible types in assignment (expression has type "Callable[[], str]", variable has type "B3") \ + # N: "function" is missing following "B3" protocol member: \ + # N: extra_stuff + +[case testCallbackProtocolFunctionAttributesInference] +from typing import Protocol, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S", covariant=True) +class A(Protocol[T, S]): + __name__: T + def __call__(self) -> S: ... + +def f() -> int: ... +def test(func: A[T, S]) -> Tuple[T, S]: ... 
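The `testCallbackProtocolFunctionAttributes*` cases above show that a callback protocol can now constrain attributes of the function object itself (such as `__name__`) in addition to its `__call__` signature. A small sketch with illustrative names:

```python
from typing import Protocol

class Named(Protocol):
    __name__: str
    def __call__(self) -> str: ...

def greet() -> str:
    return "hi"

handler: Named = greet  # OK: plain functions expose a "str"-typed __name__
```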
+reveal_type(test(f)) # N: Revealed type is "Tuple[builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + [case testProtocolsAlwaysABCs] from typing import Protocol @@ -2896,12 +2981,12 @@ c: Lst3[Str] f(Lst3(c)) # E: Argument 1 to "f" has incompatible type "Lst3[Lst3[Str]]"; expected "GetItem[GetItem[Str]]" \ # N: Following member(s) of "Lst3[Lst3[Str]]" have conflicts: \ # N: Expected: \ -# N: def __getitem__(self, int) -> GetItem[Str] \ +# N: def __getitem__(self, int, /) -> GetItem[Str] \ # N: Got: \ # N: @overload \ -# N: def __getitem__(self, slice) -> Lst3[Lst3[Str]] \ +# N: def __getitem__(self, slice, /) -> Lst3[Lst3[Str]] \ # N: @overload \ -# N: def __getitem__(self, bool) -> Lst3[Str] +# N: def __getitem__(self, bool, /) -> Lst3[Str] [builtins fixtures/list.pyi] [typing fixtures/typing-full.pyi] @@ -3086,14 +3171,14 @@ class NoneCompatible(Protocol): class A(NoneCompatible): ... A() # E: Cannot instantiate abstract class "A" with abstract attributes "f", "g", "h", "i" and "j" \ - # N: The following methods were marked implicitly abstract because they have empty function bodies: "f", "g", "h", "i" and "j". If they are not meant to be abstract, explicitly return None. + # N: The following methods were marked implicitly abstract because they have empty function bodies: "f", "g", "h", "i" and "j". If they are not meant to be abstract, explicitly `return` or `return None`. class NoneCompatible2(Protocol): def f(self, x: int): ... class B(NoneCompatible2): ... B() # E: Cannot instantiate abstract class "B" with abstract attribute "f" \ - # N: The following method was marked implicitly abstract because it has an empty function body: "f". If it is not meant to be abstract, explicitly return None. + # N: "f" is implicitly abstract because it has an empty function body. If it is not meant to be abstract, explicitly `return` or `return None`. class NoneCompatible3(Protocol): @abstractmethod @@ -3118,3 +3203,798 @@ class P(Protocol): class A(P): ... A() # E: Cannot instantiate abstract class "A" with abstract attribute "f" + +[case testProtocolWithNestedClass] +from typing import TypeVar, Protocol + +class Template(Protocol): + var: int + class Meta: ... + +class B: + var: int + class Meta: ... +class C: + var: int + class Meta(Template.Meta): ... + +def foo(t: Template) -> None: ... +foo(B()) # E: Argument 1 to "foo" has incompatible type "B"; expected "Template" \ + # N: Following member(s) of "B" have conflicts: \ + # N: Meta: expected "Type[__main__.Template.Meta]", got "Type[__main__.B.Meta]" +foo(C()) # OK + +[case testProtocolClassObjectAttribute] +from typing import ClassVar, Protocol + +class P(Protocol): + foo: int + +class A: + foo = 42 +class B: + foo: ClassVar[int] +class C: + foo: ClassVar[str] +class D: + foo: int + +def test(arg: P) -> None: ... +test(A) # OK +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: foo: expected "int", got "str" +test(D) # E: Argument 1 to "test" has incompatible type "Type[D]"; expected "P" \ + # N: Only class variables allowed for class object access on protocols, foo is an instance variable of "D" + +[case testProtocolClassObjectClassVarRejected] +from typing import ClassVar, Protocol + +class P(Protocol): + foo: ClassVar[int] + +class B: + foo: ClassVar[int] + +def test(arg: P) -> None: ... 
+test(B) # E: Argument 1 to "test" has incompatible type "Type[B]"; expected "P" \ + # N: ClassVar protocol member P.foo can never be matched by a class object + +[case testProtocolClassObjectPropertyRejected] +from typing import ClassVar, Protocol + +class P(Protocol): + @property + def foo(self) -> int: ... + +class B: + @property + def foo(self) -> int: ... +class C: + foo: int +class D: + foo: ClassVar[int] + +def test(arg: P) -> None: ... +# TODO: skip type mismatch diagnostics in this case. +test(B) # E: Argument 1 to "test" has incompatible type "Type[B]"; expected "P" \ + # N: Following member(s) of "B" have conflicts: \ + # N: foo: expected "int", got "Callable[[B], int]" \ + # N: Only class variables allowed for class object access on protocols, foo is an instance variable of "B" +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Only class variables allowed for class object access on protocols, foo is an instance variable of "C" +test(D) # OK +[builtins fixtures/property.pyi] + +[case testProtocolClassObjectInstanceMethod] +from typing import Any, Protocol + +class P(Protocol): + def foo(self, obj: Any) -> int: ... + +class B: + def foo(self) -> int: ... +class C: + def foo(self) -> str: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo(obj: Any) -> int \ + # N: Got: \ + # N: def foo(self: C) -> str + +[case testProtocolClassObjectInstanceMethodArg] +from typing import Any, Protocol + +class P(Protocol): + def foo(self, obj: B) -> int: ... + +class B: + def foo(self) -> int: ... +class C: + def foo(self) -> int: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo(obj: B) -> int \ + # N: Got: \ + # N: def foo(self: C) -> int + +[case testProtocolClassObjectInstanceMethodOverloaded] +from typing import Any, Protocol, overload + +class P(Protocol): + @overload + def foo(self, obj: Any, arg: int) -> int: ... + @overload + def foo(self, obj: Any, arg: str) -> str: ... + +class B: + @overload + def foo(self, arg: int) -> int: ... + @overload + def foo(self, arg: str) -> str: ... + def foo(self, arg: Any) -> Any: + ... + +class C: + @overload + def foo(self, arg: int) -> int: ... + @overload + def foo(self, arg: str) -> int: ... + def foo(self, arg: Any) -> Any: + ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: @overload \ + # N: def foo(obj: Any, arg: int) -> int \ + # N: @overload \ + # N: def foo(obj: Any, arg: str) -> str \ + # N: Got: \ + # N: @overload \ + # N: def foo(self: C, arg: int) -> int \ + # N: @overload \ + # N: def foo(self: C, arg: str) -> int + +[case testProtocolClassObjectClassMethod] +from typing import Protocol + +class P(Protocol): + def foo(self) -> int: ... + +class B: + @classmethod + def foo(cls) -> int: ... +class C: + @classmethod + def foo(cls) -> str: ... + +def test(arg: P) -> None: ... 
+test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo() -> int \ + # N: Got: \ + # N: def foo() -> str +[builtins fixtures/classmethod.pyi] + +[case testProtocolClassObjectStaticMethod] +from typing import Protocol + +class P(Protocol): + def foo(self) -> int: ... + +class B: + @staticmethod + def foo() -> int: ... +class C: + @staticmethod + def foo() -> str: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo() -> int \ + # N: Got: \ + # N: def foo() -> str +[builtins fixtures/staticmethod.pyi] + +[case testProtocolClassObjectGenericInstanceMethod] +from typing import Any, Protocol, Generic, List, TypeVar + +class P(Protocol): + def foo(self, obj: Any) -> List[int]: ... + +T = TypeVar("T") +class A(Generic[T]): + def foo(self) -> T: ... +class AA(A[List[T]]): ... + +class B(AA[int]): ... +class C(AA[str]): ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo(obj: Any) -> List[int] \ + # N: Got: \ + # N: def foo(self: A[List[str]]) -> List[str] +[builtins fixtures/list.pyi] + +[case testProtocolClassObjectGenericClassMethod] +from typing import Any, Protocol, Generic, List, TypeVar + +class P(Protocol): + def foo(self) -> List[int]: ... + +T = TypeVar("T") +class A(Generic[T]): + @classmethod + def foo(self) -> T: ... +class AA(A[List[T]]): ... + +class B(AA[int]): ... +class C(AA[str]): ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo() -> List[int] \ + # N: Got: \ + # N: def foo() -> List[str] +[builtins fixtures/isinstancelist.pyi] + +[case testProtocolClassObjectSelfTypeInstanceMethod] +from typing import Protocol, TypeVar, Union + +T = TypeVar("T") +class P(Protocol): + def foo(self, arg: T) -> T: ... + +class B: + def foo(self: T) -> T: ... +class C: + def foo(self: T) -> Union[T, int]: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def [T] foo(arg: T) -> T \ + # N: Got: \ + # N: def [T] foo(self: T) -> Union[T, int] + +[case testProtocolClassObjectSelfTypeClassMethod] +from typing import Protocol, Type, TypeVar + +T = TypeVar("T") +class P(Protocol): + def foo(self) -> B: ... + +class B: + @classmethod + def foo(cls: Type[T]) -> T: ... +class C: + @classmethod + def foo(cls: Type[T]) -> T: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo() -> B \ + # N: Got: \ + # N: def foo() -> C +[builtins fixtures/classmethod.pyi] + +[case testProtocolClassObjectAttributeAndCall] +from typing import Any, ClassVar, Protocol + +class P(Protocol): + foo: int + def __call__(self, x: int, y: int) -> Any: ... + +class B: + foo: ClassVar[int] + def __init__(self, x: int, y: int) -> None: ... 
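The `testProtocolClassObject*` cases above (and the `...AttributeAndCall`/`...PureCallback` cases continuing below) let a class object satisfy a callback protocol when its constructor is compatible with the protocol's `__call__`. A sketch with illustrative names (`Factory`, `Point`, `Label`):

```python
from typing import Any, Protocol

class Factory(Protocol):
    def __call__(self, x: int, y: int) -> Any: ...

class Point:
    def __init__(self, x: int, y: int) -> None: ...

class Label:
    def __init__(self, x: int, y: str) -> None: ...

def build(make: Factory) -> None: ...

build(Point)  # OK: __init__ is compatible with Factory.__call__
build(Label)  # error: "Label" has constructor incompatible with "__call__" of "Factory"
```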
+class C: + foo: ClassVar[int] + def __init__(self, x: int, y: str) -> None: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: "C" has constructor incompatible with "__call__" of "P" + +[case testProtocolClassObjectPureCallback] +from typing import Any, ClassVar, Protocol + +class P(Protocol): + def __call__(self, x: int, y: int) -> Any: ... + +class B: + def __init__(self, x: int, y: int) -> None: ... +class C: + def __init__(self, x: int, y: str) -> None: ... + +def test(arg: P) -> None: ... +test(B) # OK +test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: "C" has constructor incompatible with "__call__" of "P" +[builtins fixtures/type.pyi] + +[case testProtocolTypeTypeAttribute] +from typing import ClassVar, Protocol, Type + +class P(Protocol): + foo: int + +class A: + foo = 42 +class B: + foo: ClassVar[int] +class C: + foo: ClassVar[str] +class D: + foo: int + +def test(arg: P) -> None: ... +a: Type[A] +b: Type[B] +c: Type[C] +d: Type[D] +test(a) # OK +test(b) # OK +test(c) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: foo: expected "int", got "str" +test(d) # E: Argument 1 to "test" has incompatible type "Type[D]"; expected "P" \ + # N: Only class variables allowed for class object access on protocols, foo is an instance variable of "D" + +[case testProtocolTypeTypeInstanceMethod] +from typing import Any, Protocol, Type + +class P(Protocol): + def foo(self, cls: Any) -> int: ... + +class B: + def foo(self) -> int: ... +class C: + def foo(self) -> str: ... + +def test(arg: P) -> None: ... +b: Type[B] +c: Type[C] +test(b) # OK +test(c) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo(cls: Any) -> int \ + # N: Got: \ + # N: def foo(self: C) -> str + +[case testProtocolTypeTypeClassMethod] +from typing import Protocol, Type + +class P(Protocol): + def foo(self) -> int: ... + +class B: + @classmethod + def foo(cls) -> int: ... +class C: + @classmethod + def foo(cls) -> str: ... + +def test(arg: P) -> None: ... +b: Type[B] +c: Type[C] +test(b) # OK +test(c) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def foo() -> int \ + # N: Got: \ + # N: def foo() -> str +[builtins fixtures/classmethod.pyi] + +[case testProtocolTypeTypeSelfTypeInstanceMethod] +from typing import Protocol, Type, TypeVar, Union + +T = TypeVar("T") +class P(Protocol): + def foo(self, arg: T) -> T: ... + +class B: + def foo(self: T) -> T: ... +class C: + def foo(self: T) -> Union[T, int]: ... + +def test(arg: P) -> None: ... +b: Type[B] +c: Type[C] +test(b) # OK +test(c) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def [T] foo(arg: T) -> T \ + # N: Got: \ + # N: def [T] foo(self: T) -> Union[T, int] + +[case testProtocolClassObjectInference] +from typing import Any, Protocol, TypeVar + +T = TypeVar("T", contravariant=True) +class P(Protocol[T]): + def foo(self, obj: T) -> int: ... + +class B: + def foo(self) -> int: ... + +S = TypeVar("S") +def test(arg: P[S]) -> S: ... 
+reveal_type(test(B)) # N: Revealed type is "__main__.B" + +[case testProtocolTypeTypeInference] +from typing import Any, Protocol, TypeVar, Type + +T = TypeVar("T", contravariant=True) +class P(Protocol[T]): + def foo(self, obj: T) -> int: ... + +class B: + def foo(self) -> int: ... + +S = TypeVar("S") +def test(arg: P[S]) -> S: ... +b: Type[B] +reveal_type(test(b)) # N: Revealed type is "__main__.B" + +[case testTypeAliasInProtocolBody] +from typing import Protocol, List + +class P(Protocol): + x = List[str] # E: Type aliases are prohibited in protocol bodies \ + # N: Use variable annotation syntax to define protocol members + +class C: + x: int +def foo(x: P) -> None: ... +foo(C()) # No extra error here +[builtins fixtures/list.pyi] + +[case testTypeVarInProtocolBody] +from typing import Protocol, TypeVar + +class C(Protocol): + T = TypeVar('T') + def __call__(self, t: T) -> T: ... + +def f_bad(t: int) -> int: + return t + +S = TypeVar("S") +def f_good(t: S) -> S: + return t + +g: C = f_bad # E: Incompatible types in assignment (expression has type "Callable[[int], int]", variable has type "C") \ + # N: "C.__call__" has type "Callable[[Arg(T, 't')], T]" +g = f_good # OK + +[case testModuleAsProtocolImplementation] +import default_config +import bad_config_1 +import bad_config_2 +import bad_config_3 +from typing import Protocol + +class Options(Protocol): + timeout: int + one_flag: bool + other_flag: bool + def update(self) -> bool: ... + +def setup(options: Options) -> None: ... +setup(default_config) # OK +setup(bad_config_1) # E: Argument 1 to "setup" has incompatible type Module; expected "Options" \ + # N: "ModuleType" is missing following "Options" protocol member: \ + # N: timeout +setup(bad_config_2) # E: Argument 1 to "setup" has incompatible type Module; expected "Options" \ + # N: Following member(s) of Module "bad_config_2" have conflicts: \ + # N: one_flag: expected "bool", got "int" +setup(bad_config_3) # E: Argument 1 to "setup" has incompatible type Module; expected "Options" \ + # N: Following member(s) of Module "bad_config_3" have conflicts: \ + # N: Expected: \ + # N: def update() -> bool \ + # N: Got: \ + # N: def update(obj: Any) -> bool + +[file default_config.py] +timeout = 100 +one_flag = True +other_flag = False +def update() -> bool: ... + +[file bad_config_1.py] +one_flag = True +other_flag = False +def update() -> bool: ... + +[file bad_config_2.py] +timeout = 100 +one_flag = 42 +other_flag = False +def update() -> bool: ... + +[file bad_config_3.py] +timeout = 100 +one_flag = True +other_flag = False +def update(obj) -> bool: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolImplementationInference] +import default_config +from typing import Protocol, TypeVar + +T = TypeVar("T", covariant=True) +class Options(Protocol[T]): + timeout: int + one_flag: bool + other_flag: bool + def update(self) -> T: ... + +def setup(options: Options[T]) -> T: ... +reveal_type(setup(default_config)) # N: Revealed type is "builtins.str" + +[file default_config.py] +timeout = 100 +one_flag = True +other_flag = False +def update() -> str: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolImplementationClassObject] +import runner +import bad_runner +from typing import Callable, Protocol + +class Runner(Protocol): + @property + def Run(self) -> Callable[[int], Result]: ... + +class Result(Protocol): + value: int + +def run(x: Runner) -> None: ... 
+run(runner) # OK +run(bad_runner) # E: Argument 1 to "run" has incompatible type Module; expected "Runner" \ + # N: Following member(s) of Module "bad_runner" have conflicts: \ + # N: Expected: \ + # N: def (int, /) -> Result \ + # N: Got: \ + # N: def __init__(arg: str) -> Run + +[file runner.py] +class Run: + value: int + def __init__(self, arg: int) -> None: ... + +[file bad_runner.py] +class Run: + value: int + def __init__(self, arg: str) -> None: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolImplementationTypeAlias] +import runner +import bad_runner +from typing import Callable, Protocol + +class Runner(Protocol): + @property + def run(self) -> Callable[[int], Result]: ... + +class Result(Protocol): + value: int + +def run(x: Runner) -> None: ... +run(runner) # OK +run(bad_runner) # E: Argument 1 to "run" has incompatible type Module; expected "Runner" \ + # N: Following member(s) of Module "bad_runner" have conflicts: \ + # N: Expected: \ + # N: def (int, /) -> Result \ + # N: Got: \ + # N: def __init__(arg: str) -> Run + +[file runner.py] +class Run: + value: int + def __init__(self, arg: int) -> None: ... +run = Run + +[file bad_runner.py] +class Run: + value: int + def __init__(self, arg: str) -> None: ... +run = Run +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolImplementationClassVar] +from typing import ClassVar, Protocol +import mod + +class My(Protocol): + x: ClassVar[int] + +def test(mod: My) -> None: ... +test(mod=mod) # E: Argument "mod" to "test" has incompatible type Module; expected "My" \ + # N: Protocol member My.x expected class variable, got instance variable +[file mod.py] +x: int +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolImplementationFinal] +from typing import Protocol +import some_module + +class My(Protocol): + a: int + +def func(arg: My) -> None: ... +func(some_module) # E: Argument 1 to "func" has incompatible type Module; expected "My" \ + # N: Protocol member My.a expected settable variable, got read-only attribute + +[file some_module.py] +from typing_extensions import Final + +a: Final = 1 +[builtins fixtures/module.pyi] + + +[case testModuleAsProtocolRedefinitionTopLevel] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + import mod1 as t +else: + import mod2 as t + +import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +[file mod1.py] +def f() -> str: ... + +[file mod2.py] +def f() -> str: ... + +[file badmod.py] +def nothing() -> int: ... +[builtins fixtures/module.pyi] + +[case testModuleAsProtocolRedefinitionImportFrom] +from typing import Protocol + +class P(Protocol): + def f(self) -> str: ... + +cond: bool +t: P +if cond: + from package import mod1 as t +else: + from package import mod2 as t + +from package import badmod as t # E: Incompatible import of "t" (imported name has type Module, local name has type "P") + +package: int = 10 + +import package.mod1 as t +import package.mod1 # E: Incompatible import of "package" (imported name has type Module, local name has type "int") + +[file package/mod1.py] +def f() -> str: ... + +[file package/mod2.py] +def f() -> str: ... + +[file package/badmod.py] +def nothing() -> int: ... +[builtins fixtures/module.pyi] + +[case testProtocolSelfTypeNewSyntax] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... 
+ +class C: + next: C +class S: + next: Self + +x: P = C() +y: P = S() + +z: P +reveal_type(S().next) # N: Revealed type is "__main__.S" +reveal_type(z.next) # N: Revealed type is "__main__.P" +[builtins fixtures/property.pyi] + +[case testProtocolSelfTypeNewSyntaxSubProtocol] +from typing import Protocol, Self + +class P(Protocol): + @property + def next(self) -> Self: ... +class PS(P, Protocol): + @property + def other(self) -> Self: ... + +class C: + next: C + other: C +class S: + next: Self + other: Self + +x: PS = C() +y: PS = S() +[builtins fixtures/property.pyi] + +[case testProtocolClassVarSelfType] +from typing import ClassVar, Self, Protocol + +class P(Protocol): + DEFAULT: ClassVar[Self] +class C: + DEFAULT: ClassVar[C] + +x: P = C() + +[case testInferenceViaTypeTypeMetaclass] +from typing import Iterator, Iterable, TypeVar, Type + +M = TypeVar("M") + +class Meta(type): + def __iter__(self: Type[M]) -> Iterator[M]: ... +class Foo(metaclass=Meta): ... + +T = TypeVar("T") +def test(x: Iterable[T]) -> T: ... + +reveal_type(test(Foo)) # N: Revealed type is "__main__.Foo" +t_foo: Type[Foo] +reveal_type(test(t_foo)) # N: Revealed type is "__main__.Foo" + +TF = TypeVar("TF", bound=Foo) +def outer(cls: Type[TF]) -> TF: + reveal_type(test(cls)) # N: Revealed type is "TF`-1" + return cls() diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 0003ad2601e0..7a934348aaf2 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -317,6 +317,21 @@ match x: case [str()]: pass +[case testMatchSequencePatternWithInvalidClassPattern] +class Example: + __match_args__ = ("value",) + def __init__(self, value: str) -> None: + self.value = value + +SubClass: type[Example] + +match [SubClass("a"), SubClass("b")]: + case [SubClass(value), *rest]: # E: Expected type in class pattern; found "Type[__main__.Example]" + reveal_type(value) # E: Cannot determine type of "value" \ + # N: Revealed type is "Any" + reveal_type(rest) # N: Revealed type is "builtins.list[__main__.Example]" +[builtins fixtures/tuple.pyi] + [case testMatchSequenceUnion-skip] from typing import List, Union m: Union[List[List[str]], str] @@ -1600,3 +1615,229 @@ def foo(x: NoneType): # E: NoneType should not be used as a type, please use Non reveal_type(x) # N: Revealed type is "None" [builtins fixtures/tuple.pyi] + +[case testMatchTupleInstanceUnionNoCrash] +from typing import Union + +def func(e: Union[str, tuple[str]]) -> None: + match e: + case (a,) if isinstance(a, str): + reveal_type(a) # N: Revealed type is "builtins.str" +[builtins fixtures/tuple.pyi] + +[case testMatchTupleOptionalNoCrash] +# flags: --strict-optional +foo: tuple[int] | None +match foo: + case x,: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testMatchUnionTwoTuplesNoCrash] +var: tuple[int, int] | tuple[str, str] + +# TODO: we can infer better here. 
+match var: + case (42, a): + reveal_type(a) # N: Revealed type is "Union[builtins.int, builtins.str]" + case ("yes", b): + reveal_type(b) # N: Revealed type is "Union[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testMatchNamedAndKeywordsAreTheSame] +from typing import Generic, TypeVar, Union +from typing_extensions import Final +from dataclasses import dataclass + +T = TypeVar("T") + +class Regular: + x: str + y: int + __match_args__ = ("x",) +class ReveresedOrder: + x: int + y: str + __match_args__ = ("y",) +class GenericRegular(Generic[T]): + x: T + __match_args__ = ("x",) +class GenericWithFinal(Generic[T]): + x: T + __match_args__: Final = ("x",) +class RegularSubtype(GenericRegular[str]): ... + +@dataclass +class GenericDataclass(Generic[T]): + x: T + +input_arg: Union[ + Regular, + ReveresedOrder, + GenericRegular[str], + GenericWithFinal[str], + RegularSubtype, + GenericDataclass[str], +] + +# Positional: +match input_arg: + case Regular(a): + reveal_type(a) # N: Revealed type is "builtins.str" + case ReveresedOrder(a): + reveal_type(a) # N: Revealed type is "builtins.str" + case GenericWithFinal(a): + reveal_type(a) # N: Revealed type is "builtins.str" + case RegularSubtype(a): + reveal_type(a) # N: Revealed type is "builtins.str" + case GenericRegular(a): + reveal_type(a) # N: Revealed type is "builtins.str" + case GenericDataclass(a): + reveal_type(a) # N: Revealed type is "builtins.str" + +# Keywords: +match input_arg: + case Regular(x=a): + reveal_type(a) # N: Revealed type is "builtins.str" + case ReveresedOrder(x=b): # Order is different + reveal_type(b) # N: Revealed type is "builtins.int" + case GenericWithFinal(x=a): + reveal_type(a) # N: Revealed type is "builtins.str" + case RegularSubtype(x=a): + reveal_type(a) # N: Revealed type is "builtins.str" + case GenericRegular(x=a): + reveal_type(a) # N: Revealed type is "builtins.str" + case GenericDataclass(x=a): + reveal_type(a) # N: Revealed type is "builtins.str" +[builtins fixtures/dataclasses.pyi] + +[case testMatchValueConstrainedTypeVar] +from typing import TypeVar, Iterable + +S = TypeVar("S", int, str) + +def my_func(pairs: Iterable[tuple[S, S]]) -> None: + for pair in pairs: + reveal_type(pair) # N: Revealed type is "Tuple[builtins.int, builtins.int]" \ + # N: Revealed type is "Tuple[builtins.str, builtins.str]" + match pair: + case _: + reveal_type(pair) # N: Revealed type is "Tuple[builtins.int, builtins.int]" \ + # N: Revealed type is "Tuple[builtins.str, builtins.str]" +[builtins fixtures/tuple.pyi] + +[case testPossiblyUndefinedMatch] +# flags: --enable-error-code possibly-undefined +def f0(x: int | str) -> int: + match x: + case int(): + y = 1 + return y # E: Name "y" may be undefined + +def f1(a: object) -> None: + match a: + case [y]: pass + case _: + y = 1 + x = 2 + z = y + z = x # E: Name "x" may be undefined + +def f2(a: object) -> None: + match a: + case [[y] as x]: pass + case {"k1": 1, "k2": x, "k3": y}: pass + case [0, *x]: + y = 2 + case _: + y = 1 + x = [2] + z = x + z = y + +def f3(a: object) -> None: + y = 1 + match a: + case [x]: + y = 2 + # Note the missing `case _:` + z = x # E: Name "x" may be undefined + z = y + +def f4(a: object) -> None: + y = 1 + match a: + case [x]: + y = 2 + case _: + assert False, "unsupported" + z = x + z = y + +def f5(a: object) -> None: + match a: + case tuple(x): pass + case _: + return + y = x + +def f6(a: object) -> None: + if int(): + y = 1 + match a: + case _ if y is not None: # E: Name "y" may be undefined + pass +[builtins 
fixtures/tuple.pyi] + +[case testPossiblyUndefinedMatchUnreachable] +# flags: --enable-error-code possibly-undefined +import typing + +def f0(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + case _: + y = 3 + return y # No error. + +def f1(x: int) -> int: + match x: + case 1 if not typing.TYPE_CHECKING: + pass + case 2: + y = 2 + return y # E: Name "y" may be undefined + +[typing fixtures/typing-medium.pyi] + +[case testUsedBeforeDefMatchWalrus] +# flags: --enable-error-code used-before-def +import typing + +def f0(x: int) -> None: + a = y # E: Cannot determine type of "y" # E: Name "y" is used before definition + match y := x: + case 1: + b = y + case 2: + c = y + d = y + +[case testTypeAliasWithNewUnionSyntaxAndNoneLeftOperand] +from typing import overload +class C: + @overload + def __init__(self) -> None: pass + @overload + def __init__(self, x: int) -> None: pass + def __init__(self, x=0): + pass + +class D: pass + +X = None | C +Y = None | D +[builtins fixtures/type.pyi] diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test new file mode 100644 index 000000000000..7196f10f8863 --- /dev/null +++ b/test-data/unit/check-python311.test @@ -0,0 +1,65 @@ +[case testTryStarSimple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +[builtins fixtures/exception.pyi] + +[case testTryStarMultiple] +try: + pass +except* Exception as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.Exception]" +except* RuntimeError as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[builtins.RuntimeError]" +[builtins fixtures/exception.pyi] + +[case testTryStarBase] +try: + pass +except* BaseException as e: + reveal_type(e) # N: Revealed type is "builtins.BaseExceptionGroup[builtins.BaseException]" +[builtins fixtures/exception.pyi] + +[case testTryStarTuple] +class Custom(Exception): ... + +try: + pass +except* (RuntimeError, Custom) as e: + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, __main__.Custom]]" +[builtins fixtures/exception.pyi] + +[case testTryStarInvalidType] +class Bad: ... +try: + pass +except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalid] +try: + pass +except* ExceptionGroup as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" +[builtins fixtures/exception.pyi] + +[case testTryStarGroupInvalidTuple] +try: + pass +except* (RuntimeError, ExceptionGroup) as e: # E: Exception type in except* cannot derive from BaseExceptionGroup + reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Union[builtins.RuntimeError, Any]]" +[builtins fixtures/exception.pyi] + +[case testBasicTypeVarTupleGeneric] +from typing import Generic, TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +class Variadic(Generic[Unpack[Ts]]): + ... 
+ +variadic: Variadic[int, str] +reveal_type(variadic) # N: Revealed type is "__main__.Variadic[builtins.int, builtins.str]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index deded7a52f72..b9f9f2173ae1 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -115,21 +115,25 @@ def g(x: int): ... ) # type: ignore # E: Unused "type: ignore" comment [case testPEP570ArgTypesMissing] -# flags: --disallow-untyped-defs +# flags: --python-version 3.8 --disallow-untyped-defs def f(arg, /) -> None: ... # E: Function is missing a type annotation for one or more arguments [case testPEP570ArgTypesBadDefault] +# flags: --python-version 3.8 def f(arg: int = "ERROR", /) -> None: ... # E: Incompatible default for argument "arg" (default has type "str", argument has type "int") [case testPEP570ArgTypesDefault] +# flags: --python-version 3.8 def f(arg: int = 0, /) -> None: reveal_type(arg) # N: Revealed type is "builtins.int" [case testPEP570ArgTypesRequired] +# flags: --python-version 3.8 def f(arg: int, /) -> None: reveal_type(arg) # N: Revealed type is "builtins.int" [case testPEP570Required] +# flags: --python-version 3.8 def f(arg: int, /) -> None: ... # N: "f" defined here f(1) f("ERROR") # E: Argument 1 to "f" has incompatible type "str"; expected "int" @@ -137,6 +141,7 @@ f(arg=1) # E: Unexpected keyword argument "arg" for "f" f(arg="ERROR") # E: Unexpected keyword argument "arg" for "f" [case testPEP570Default] +# flags: --python-version 3.8 def f(arg: int = 0, /) -> None: ... # N: "f" defined here f() f(1) @@ -145,6 +150,7 @@ f(arg=1) # E: Unexpected keyword argument "arg" for "f" f(arg="ERROR") # E: Unexpected keyword argument "arg" for "f" [case testPEP570Calls] +# flags: --python-version 3.8 --no-strict-optional from typing import Any, Dict def f(p, /, p_or_kw, *, kw) -> None: ... 
# N: "f" defined here d = None # type: Dict[Any, Any] @@ -157,6 +163,7 @@ f(**d) # E: Missing positional argument "p_or_kw" in call to "f" [builtins fixtures/dict.pyi] [case testPEP570Signatures1] +# flags: --python-version 3.8 def f(p1: bytes, p2: float, /, p_or_kw: int, *, kw: str) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.float" @@ -164,6 +171,7 @@ def f(p1: bytes, p2: float, /, p_or_kw: int, *, kw: str) -> None: reveal_type(kw) # N: Revealed type is "builtins.str" [case testPEP570Signatures2] +# flags: --python-version 3.8 def f(p1: bytes, p2: float = 0.0, /, p_or_kw: int = 0, *, kw: str) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.float" @@ -171,28 +179,33 @@ def f(p1: bytes, p2: float = 0.0, /, p_or_kw: int = 0, *, kw: str) -> None: reveal_type(kw) # N: Revealed type is "builtins.str" [case testPEP570Signatures3] +# flags: --python-version 3.8 def f(p1: bytes, p2: float = 0.0, /, *, kw: int) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.float" reveal_type(kw) # N: Revealed type is "builtins.int" [case testPEP570Signatures4] +# flags: --python-version 3.8 def f(p1: bytes, p2: int = 0, /) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.int" [case testPEP570Signatures5] +# flags: --python-version 3.8 def f(p1: bytes, p2: float, /, p_or_kw: int) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.float" reveal_type(p_or_kw) # N: Revealed type is "builtins.int" [case testPEP570Signatures6] +# flags: --python-version 3.8 def f(p1: bytes, p2: float, /) -> None: reveal_type(p1) # N: Revealed type is "builtins.bytes" reveal_type(p2) # N: Revealed type is "builtins.float" [case testPEP570Unannotated] +# flags: --python-version 3.8 def f(arg, /): ... # N: "f" defined here g = lambda arg, /: arg def h(arg=0, /): ... # N: "h" defined here @@ -297,7 +310,7 @@ def f(x: int = (c := 4)) -> int: z2: NT # E: Variable "NT" is not valid as a type \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases - if Alias := int: + if Alias := int: # E: Function "Type[int]" could always be true in boolean context z3: Alias # E: Variable "Alias" is not valid as a type \ # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases @@ -386,7 +399,7 @@ reveal_type(z2) # E: Name "z2" is not defined # N: Revealed type is "Any" [case testWalrusConditionalTypeBinder] # flags: --python-version 3.8 -from typing import Union +from typing import Tuple, Union from typing_extensions import Literal class Good: @@ -403,7 +416,14 @@ if (thing := get_thing()).is_good: reveal_type(thing) # N: Revealed type is "__main__.Good" else: reveal_type(thing) # N: Revealed type is "__main__.Bad" -[builtins fixtures/property.pyi] + +def get_things() -> Union[Tuple[Good], Tuple[Bad]]: ... 
+ +if (things := get_things())[0].is_good: + reveal_type(things) # N: Revealed type is "Tuple[__main__.Good]" +else: + reveal_type(things) # N: Revealed type is "Tuple[__main__.Bad]" +[builtins fixtures/list.pyi] [case testWalrusConditionalTypeCheck] # flags: --strict-optional --python-version 3.8 @@ -553,7 +573,16 @@ def foo() -> None: [x := x + y for y in [1, 2, 3]] [builtins fixtures/dict.pyi] +[case testWalrusUsedBeforeDef] +# flags: --python-version 3.8 +class C: + def f(self, c: 'C') -> None: pass + +(x := C()).f(y) # E: Cannot determine type of "y" # E: Name "y" is used before definition +(y := C()).f(y) + [case testOverloadWithPositionalOnlySelf] +# flags: --python-version 3.8 from typing import overload, Optional class Foo: @@ -576,3 +605,173 @@ class Bar: def f(self, a: Optional[str] = None, /, *, b: bool = False) -> None: ... [builtins fixtures/bool.pyi] + +[case testOverloadPositionalOnlyErrorMessage] +# flags: --python-version 3.8 +from typing import overload + +@overload +def foo(a: int, /): ... +@overload +def foo(a: str): ... +def foo(a): ... + +foo(a=1) +[out] +main:10: error: No overload variant of "foo" matches argument type "int" +main:10: note: Possible overload variants: +main:10: note: def foo(int, /) -> Any +main:10: note: def foo(a: str) -> Any + +[case testOverloadPositionalOnlyErrorMessageAllTypes] +# flags: --python-version 3.8 +from typing import overload + +@overload +def foo(a: int, /, b: int, *, c: int): ... +@overload +def foo(a: str, b: int, *, c: int): ... +def foo(a, b, *, c): ... + +foo(a=1) +[out] +main:10: error: No overload variant of "foo" matches argument type "int" +main:10: note: Possible overload variants: +main:10: note: def foo(int, /, b: int, *, c: int) -> Any +main:10: note: def foo(a: str, b: int, *, c: int) -> Any + +[case testOverloadPositionalOnlyErrorMessageMultiplePosArgs] +# flags: --python-version 3.8 +from typing import overload + +@overload +def foo(a: int, b: int, c: int, /, d: str): ... +@overload +def foo(a: str, b: int, c: int, d: str): ... +def foo(a, b, c, d): ... + +foo(a=1) +[out] +main:10: error: No overload variant of "foo" matches argument type "int" +main:10: note: Possible overload variants: +main:10: note: def foo(int, int, int, /, d: str) -> Any +main:10: note: def foo(a: str, b: int, c: int, d: str) -> Any + +[case testOverloadPositionalOnlyErrorMessageMethod] +# flags: --python-version 3.8 +from typing import overload + +class Some: + @overload + def foo(self, __a: int): ... + @overload + def foo(self, a: float, /): ... + @overload + def foo(self, a: str): ... + def foo(self, a): ... + +Some().foo(a=1) +[out] +main:13: error: No overload variant of "foo" of "Some" matches argument type "int" +main:13: note: Possible overload variants: +main:13: note: def foo(self, int, /) -> Any +main:13: note: def foo(self, float, /) -> Any +main:13: note: def foo(self, a: str) -> Any + +[case testOverloadPositionalOnlyErrorMessageClassMethod] +# flags: --python-version 3.8 +from typing import overload + +class Some: + @overload + @classmethod + def foo(cls, __a: int): ... + @overload + @classmethod + def foo(cls, a: float, /): ... + @overload + @classmethod + def foo(cls, a: str): ... + @classmethod + def foo(cls, a): ... 
+ +Some.foo(a=1) +[builtins fixtures/classmethod.pyi] +[out] +main:17: error: No overload variant of "foo" of "Some" matches argument type "int" +main:17: note: Possible overload variants: +main:17: note: def foo(cls, int, /) -> Any +main:17: note: def foo(cls, float, /) -> Any +main:17: note: def foo(cls, a: str) -> Any + +[case testUnpackWithDuplicateNamePositionalOnly] +# flags: --python-version 3.8 +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int +def foo(name: str, /, **kwargs: Unpack[Person]) -> None: # Allowed + ... +[builtins fixtures/dict.pyi] + +[case testPossiblyUndefinedWithAssignmentExpr] +# flags: --python-version 3.8 --enable-error-code possibly-undefined +def f1() -> None: + d = {0: 1} + if int(): + x = 1 + + if (x := d[x]) is None: # E: Name "x" may be undefined + y = x + z = x +[builtins fixtures/dict.pyi] + +[case testNarrowOnSelfInGeneric] +# flags: --strict-optional +from typing import Generic, TypeVar, Optional + +T = TypeVar("T", int, str) + +class C(Generic[T]): + x: Optional[T] + def meth(self) -> Optional[T]: + if (y := self.x) is not None: + reveal_type(y) + return None +[out] +main:10: note: Revealed type is "builtins.int" +main:10: note: Revealed type is "builtins.str" + +[case testTypeGuardWithPositionalOnlyArg] +# flags: --python-version 3.8 +from typing_extensions import TypeGuard + +def typeguard(x: object, /) -> TypeGuard[int]: + ... + +n: object +if typeguard(n): + reveal_type(n) +[builtins fixtures/tuple.pyi] +[out] +main:9: note: Revealed type is "builtins.int" + +[case testTypeGuardKeywordFollowingWalrus] +# flags: --python-version 3.8 +from typing import cast +from typing_extensions import TypeGuard + +def typeguard(x: object) -> TypeGuard[int]: + ... + +if typeguard(x=(n := cast(object, "hi"))): + reveal_type(n) +[builtins fixtures/tuple.pyi] +[out] +main:9: note: Revealed type is "builtins.int" + +[case testNoCrashOnAssignmentExprClass] +class C: + [(j := i) for i in [1, 2, 3]] # E: Assignment expression within a comprehension cannot be used in a class body +[builtins fixtures/list.pyi] diff --git a/test-data/unit/check-python39.test b/test-data/unit/check-python39.test index d169f4001015..105051a840bb 100644 --- a/test-data/unit/check-python39.test +++ b/test-data/unit/check-python39.test @@ -4,9 +4,9 @@ # most important test, to deal with this we'll only run this test with Python 3.9 and later. 
import typing def f(a: 'A', b: 'B') -> None: pass -f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" class A: pass class B: pass +f(a=A(), b=B(), a=A()) # E: "f" gets multiple values for keyword argument "a" [case testPEP614] diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index c326246436ba..b7b4372ecc12 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -1,7 +1,6 @@ -- Tests checking that basic functionality works [case testRecursiveAliasBasic] -# flags: --enable-recursive-aliases from typing import Dict, List, Union, TypeVar, Sequence JSON = Union[str, List[JSON], Dict[str, JSON]] @@ -17,7 +16,6 @@ x = ["foo", {"bar": [Bad()]}] # E: List item 0 has incompatible type "Bad"; exp [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasBasicGenericSubtype] -# flags: --enable-recursive-aliases from typing import Union, TypeVar, Sequence, List T = TypeVar("T") @@ -37,7 +35,6 @@ xx = yy # OK [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasBasicGenericInference] -# flags: --enable-recursive-aliases from typing import Union, TypeVar, Sequence, List T = TypeVar("T") @@ -61,7 +58,6 @@ x = [1, [Bad()]] # E: List item 0 has incompatible type "Bad"; expected "Union[ [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasGenericInferenceNested] -# flags: --enable-recursive-aliases from typing import Union, TypeVar, Sequence, List T = TypeVar("T") @@ -77,7 +73,6 @@ reveal_type(flatten([[B(), [[B()]]]])) # N: Revealed type is "builtins.list[__m [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasNewStyleSupported] -# flags: --enable-recursive-aliases from test import A x: A @@ -93,7 +88,7 @@ A = int | list[A] -- Tests duplicating some existing type alias tests with recursive aliases enabled [case testRecursiveAliasesMutual] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def from typing import Type, Callable, Union A = Union[B, int] @@ -103,7 +98,6 @@ x: A reveal_type(x) # N: Revealed type is "Union[def (Union[Type[def (...) 
-> builtins.int], Type[builtins.int]]) -> builtins.int, builtins.int]" [case testRecursiveAliasesProhibited-skip] -# flags: --enable-recursive-aliases from typing import Type, Callable, Union A = Union[B, int] @@ -111,7 +105,6 @@ B = Union[A, int] C = Type[C] [case testRecursiveAliasImported] -# flags: --enable-recursive-aliases import lib x: lib.A reveal_type(x) # N: Revealed type is "builtins.list[builtins.list[...]]" @@ -128,7 +121,7 @@ B = List[A] [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def from typing import List x: B @@ -140,7 +133,7 @@ reveal_type(x[0][0]) # N: Revealed type is "__main__.C" [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass2] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def from typing import NewType, List x: D @@ -154,7 +147,6 @@ class B(D): [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClass3] -# flags: --enable-recursive-aliases from typing import List, Generic, TypeVar, NamedTuple T = TypeVar('T') @@ -173,7 +165,7 @@ reveal_type(x) # N: Revealed type is "__main__.G[Tuple[builtins.int, fallback=_ [builtins fixtures/list.pyi] [case testRecursiveAliasViaBaseClassImported] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def import a [file a.py] from typing import List @@ -190,7 +182,6 @@ reveal_type(f) # N: Revealed type is "def (x: builtins.list[a.C]) -> builtins.l [builtins fixtures/list.pyi] [case testRecursiveAliasViaNamedTuple] -# flags: --enable-recursive-aliases from typing import List, NamedTuple, Union Exp = Union['A', 'B'] @@ -210,7 +201,6 @@ my_eval(A([B(1), B(2)])) [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasesSimplifiedUnion] -# flags: --enable-recursive-aliases from typing import Sequence, TypeVar, Union class A: ... @@ -231,7 +221,6 @@ x = y # E: Incompatible types in assignment (expression has type "Sequence[Unio [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasesJoins] -# flags: --enable-recursive-aliases from typing import Sequence, TypeVar, Union class A: ... @@ -257,7 +246,6 @@ x = y3 # E: Incompatible types in assignment (expression has type "Sequence[Uni [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasesRestrictions] -# flags: --enable-recursive-aliases from typing import Sequence, Mapping, Union A = Sequence[Union[int, A]] @@ -272,7 +260,6 @@ else: [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasesRestrictions2] -# flags: --enable-recursive-aliases from typing import Sequence, Union class A: ... @@ -296,7 +283,6 @@ if isinstance(b[0], Sequence): [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasWithRecursiveInstance] -# flags: --enable-recursive-aliases from typing import Sequence, Union, TypeVar class A: ... @@ -317,7 +303,6 @@ reveal_type(join(b, a)) # N: Revealed type is "typing.Sequence[Union[__main__.A [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasWithRecursiveInstanceInference] -# flags: --enable-recursive-aliases from typing import Sequence, Union, TypeVar, List T = TypeVar("T") @@ -338,7 +323,6 @@ reveal_type(bar(nib)) # N: Revealed type is "__main__.B" [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasTopUnion] -# flags: --enable-recursive-aliases from typing import Sequence, Union, TypeVar, List class A: ... 
@@ -363,7 +347,6 @@ reveal_type(foo(xx)) # N: Revealed type is "__main__.B" [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasInferenceExplicitNonRecursive] -# flags: --enable-recursive-aliases from typing import Sequence, Union, TypeVar, List T = TypeVar("T") @@ -390,7 +373,6 @@ reveal_type(bar(llla)) # N: Revealed type is "__main__.A" [builtins fixtures/isinstancelist.pyi] [case testRecursiveAliasesWithOptional] -# flags: --enable-recursive-aliases from typing import Optional, Sequence A = Sequence[Optional[A]] @@ -398,7 +380,7 @@ x: A y: str = x[0] # E: Incompatible types in assignment (expression has type "Optional[A]", variable has type "str") [case testRecursiveAliasesProhibitBadAliases] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def from typing import Union, Type, List, TypeVar NR = List[int] @@ -440,7 +422,7 @@ reveal_type(d) # N: Revealed type is "Any" [builtins fixtures/isinstancelist.pyi] [case testBasicRecursiveNamedTuple] -# flags: --enable-recursive-aliases +# flags: --strict-optional from typing import NamedTuple, Optional NT = NamedTuple("NT", [("x", Optional[NT]), ("y", int)]) @@ -454,7 +436,6 @@ if nt.x is not None: [builtins fixtures/tuple.pyi] [case testBasicRecursiveNamedTupleSpecial] -# flags: --enable-recursive-aliases from typing import NamedTuple, TypeVar, Tuple NT = NamedTuple("NT", [("x", NT), ("y", int)]) @@ -476,7 +457,7 @@ reveal_type(f(tnt, nt)) # N: Revealed type is "builtins.tuple[Any, ...]" [builtins fixtures/tuple.pyi] [case testBasicRecursiveNamedTupleClass] -# flags: --enable-recursive-aliases +# flags: --strict-optional from typing import NamedTuple, Optional class NT(NamedTuple): @@ -493,7 +474,6 @@ if nt.x is not None: [builtins fixtures/tuple.pyi] [case testRecursiveRegularTupleClass] -# flags: --enable-recursive-aliases from typing import Tuple x: B @@ -505,7 +485,6 @@ reveal_type(b.x) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] [case testRecursiveTupleClassesNewType] -# flags: --enable-recursive-aliases from typing import Tuple, NamedTuple, NewType x: C @@ -528,7 +507,8 @@ reveal_type(bnt.y) # N: Revealed type is "builtins.int" -- Tests duplicating some existing named tuple tests with recursive aliases enabled [case testMutuallyRecursiveNamedTuples] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def + from typing import Tuple, NamedTuple, TypeVar, Union A = NamedTuple('A', [('x', str), ('y', Tuple[B, ...])]) @@ -547,7 +527,6 @@ y: str = x # E: Incompatible types in assignment (expression has type "Union[st [builtins fixtures/tuple.pyi] [case testMutuallyRecursiveNamedTuplesJoin] -# flags: --enable-recursive-aliases from typing import NamedTuple, Tuple class B(NamedTuple): @@ -560,11 +539,17 @@ m: A s: str = n.x # E: Incompatible types in assignment (expression has type "Tuple[A, int]", variable has type "str") reveal_type(m[0]) # N: Revealed type is "builtins.str" lst = [m, n] -reveal_type(lst[0]) # N: Revealed type is "Tuple[builtins.object, builtins.object]" + +# Unfortunately, join of two recursive types is not very precise. 
+reveal_type(lst[0]) # N: Revealed type is "builtins.object" + +# These just should not crash +lst1 = [m] +lst2 = [m, m] +lst3 = [m, m, m] [builtins fixtures/tuple.pyi] [case testMutuallyRecursiveNamedTuplesClasses] -# flags: --enable-recursive-aliases from typing import NamedTuple, Tuple class B(NamedTuple): @@ -587,7 +572,7 @@ t = m # E: Incompatible types in assignment (expression has type "B", variable [builtins fixtures/tuple.pyi] [case testMutuallyRecursiveNamedTuplesCalls] -# flags: --enable-recursive-aliases +# flags: --disable-error-code used-before-def from typing import NamedTuple B = NamedTuple('B', [('x', A), ('y', int)]) @@ -600,7 +585,6 @@ f(n) # E: Argument 1 to "f" has incompatible type "A"; expected "B" [builtins fixtures/tuple.pyi] [case testNoRecursiveTuplesAtFunctionScope] -# flags: --enable-recursive-aliases from typing import NamedTuple, Tuple def foo() -> None: class B(NamedTuple): @@ -608,11 +592,10 @@ def foo() -> None: # N: Recursive types are not allowed at function scope y: int b: B - reveal_type(b) # N: Revealed type is "Tuple[Any, builtins.int, fallback=__main__.B@4]" + reveal_type(b) # N: Revealed type is "Tuple[Any, builtins.int, fallback=__main__.B@3]" [builtins fixtures/tuple.pyi] [case testBasicRecursiveGenericNamedTuple] -# flags: --enable-recursive-aliases from typing import Generic, NamedTuple, TypeVar, Union T = TypeVar("T", covariant=True) @@ -636,7 +619,6 @@ reveal_type(last(ntb)) # N: Revealed type is "__main__.B" [builtins fixtures/tuple.pyi] [case testBasicRecursiveTypedDictClass] -# flags: --enable-recursive-aliases from typing import TypedDict class TD(TypedDict): @@ -650,7 +632,6 @@ s: str = td["y"] # E: Incompatible types in assignment (expression has type "TD [typing fixtures/typing-typeddict.pyi] [case testBasicRecursiveTypedDictCall] -# flags: --enable-recursive-aliases from typing import TypedDict TD = TypedDict("TD", {"x": int, "y": TD}) @@ -668,7 +649,6 @@ td = td3 # E: Incompatible types in assignment (expression has type "TD3", vari [typing fixtures/typing-typeddict.pyi] [case testBasicRecursiveTypedDictExtending] -# flags: --enable-recursive-aliases from typing import TypedDict class TDA(TypedDict): @@ -689,7 +669,6 @@ reveal_type(td) # N: Revealed type is "TypedDict('__main__.TD', {'xb': builtins [typing fixtures/typing-typeddict.pyi] [case testRecursiveTypedDictCreation] -# flags: --enable-recursive-aliases from typing import TypedDict, Optional class TD(TypedDict): @@ -705,7 +684,7 @@ itd2 = TD(x=0, y=TD(x=0, y=TD(x=0, y=None))) [typing fixtures/typing-typeddict.pyi] [case testRecursiveTypedDictMethods] -# flags: --enable-recursive-aliases +# flags: --strict-optional from typing import TypedDict class TD(TypedDict, total=False): @@ -725,7 +704,6 @@ td.update({"x": 0, "y": {"x": 1, "y": {"x": 2, "y": 42}}}) # E: Incompatible ty [typing fixtures/typing-typeddict.pyi] [case testRecursiveTypedDictSubtyping] -# flags: --enable-recursive-aliases from typing import TypedDict class TDA1(TypedDict): @@ -752,7 +730,6 @@ fb(tda1) # E: Argument 1 to "fb" has incompatible type "TDA1"; expected "TDB" [typing fixtures/typing-typeddict.pyi] [case testRecursiveTypedDictJoin] -# flags: --enable-recursive-aliases from typing import TypedDict, TypeVar class TDA1(TypedDict): @@ -778,7 +755,6 @@ reveal_type(f(tda1, tdb)) # N: Revealed type is "TypedDict({})" [typing fixtures/typing-typeddict.pyi] [case testBasicRecursiveGenericTypedDict] -# flags: --enable-recursive-aliases from typing import TypedDict, TypeVar, Generic, Optional, List T = 
TypeVar("T") @@ -794,7 +770,6 @@ reveal_type(collect({"left": {"right": {"value": 0}}})) # N: Revealed type is " [typing fixtures/typing-typeddict.pyi] [case testRecursiveGenericTypedDictExtending] -# flags: --enable-recursive-aliases from typing import TypedDict, Generic, TypeVar, List T = TypeVar("T") @@ -810,3 +785,115 @@ std: STD[str] reveal_type(std) # N: Revealed type is "TypedDict('__main__.STD', {'val': builtins.str, 'other': ..., 'sval': builtins.str, 'one': TypedDict('__main__.TD', {'val': builtins.str, 'other': ...})})" [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] + +[case testRecursiveClassLevelAlias] +# flags: --strict-optional +from typing import Union, Sequence + +class A: + Children = Union[Sequence['Children'], 'A', None] +x: A.Children +reveal_type(x) # N: Revealed type is "Union[typing.Sequence[...], __main__.A, None]" + +class B: + Foo = Sequence[Bar] + Bar = Sequence[Foo] +y: B.Foo +reveal_type(y) # N: Revealed type is "typing.Sequence[typing.Sequence[...]]" +[builtins fixtures/tuple.pyi] + +[case testNoCrashOnRecursiveTupleFallback] +from typing import Union, Tuple + +Tree1 = Union[str, Tuple[Tree1]] +Tree2 = Union[str, Tuple[Tree2, Tree2]] +Tree3 = Union[str, Tuple[Tree3, Tree3, Tree3]] + +def test1() -> Tree1: + return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree1]]") +def test2() -> Tree2: + return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree2, Tree2]]") +def test3() -> Tree3: + return 42 # E: Incompatible return value type (got "int", expected "Union[str, Tuple[Tree3, Tree3, Tree3]]") +[builtins fixtures/tuple.pyi] + +[case testRecursiveDoubleUnionNoCrash] +from typing import Tuple, Union, Callable, Sequence + +K = Union[int, Tuple[Union[int, K]]] +L = Union[int, Callable[[], Union[int, L]]] +M = Union[int, Sequence[Union[int, M]]] + +x: K +x = x +y: L +y = y +z: M +z = z + +x = y # E: Incompatible types in assignment (expression has type "L", variable has type "K") +z = x # OK +[builtins fixtures/tuple.pyi] + +[case testRecursiveInstanceInferenceNoCrash] +from typing import Sequence, TypeVar, Union + +class C(Sequence[C]): ... + +T = TypeVar("T") +def foo(x: T) -> C: ... + +Nested = Union[C, Sequence[Nested]] +x: Nested = foo(42) + +[case testNoRecursiveExpandInstanceUnionCrash] +from typing import List, Union + +class Tag(List[Union[Tag, List[Tag]]]): ... +Tag() + +[case testNoRecursiveExpandInstanceUnionCrashGeneric] +from typing import Generic, Iterable, TypeVar, Union + +ValueT = TypeVar("ValueT") +class Recursive(Iterable[Union[ValueT, Recursive[ValueT]]]): + pass + +class Base(Generic[ValueT]): + def __init__(self, element: ValueT): + pass +class Sub(Base[Union[ValueT, Recursive[ValueT]]]): + pass + +x: Iterable[str] +reveal_type(Sub) # N: Revealed type is "def [ValueT] (element: Union[ValueT`1, __main__.Recursive[ValueT`1]]) -> __main__.Sub[ValueT`1]" +reveal_type(Sub(x)) # N: Revealed type is "__main__.Sub[typing.Iterable[builtins.str]]" + +[case testNoRecursiveExpandInstanceUnionCrashInference] +# flags: --disable-error-code used-before-def +from typing import TypeVar, Union, Generic, List + +T = TypeVar("T") +InList = Union[T, InListRecurse[T]] +class InListRecurse(Generic[T], List[InList[T]]): ... + +def list_thing(transforming: InList[T]) -> T: + ... 
+reveal_type(list_thing([5])) # N: Revealed type is "builtins.list[builtins.int]" + +[case testRecursiveTypedDictWithList] +from typing import List +from typing_extensions import TypedDict + +Example = TypedDict("Example", {"rec": List["Example"]}) +e: Example +reveal_type(e) # N: Revealed type is "TypedDict('__main__.Example', {'rec': builtins.list[...]})" +[builtins fixtures/dict.pyi] + +[case testRecursiveNamedTupleWithList] +from typing import List, NamedTuple + +Example = NamedTuple("Example", [("rec", List["Example"])]) +e: Example +reveal_type(e) # N: Revealed type is "Tuple[builtins.list[...], fallback=__main__.Example]" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-redefine.test b/test-data/unit/check-redefine.test index e73f715c9ec0..e3f1b976d4e9 100644 --- a/test-data/unit/check-redefine.test +++ b/test-data/unit/check-redefine.test @@ -285,7 +285,7 @@ def f() -> None: import typing as m m = 1 # E: Incompatible types in assignment (expression has type "int", variable has type Module) n = 1 - import typing as n # E: Name "n" already defined on line 5 + import typing as n # E: Incompatible import of "n" (imported name has type Module, local name has type "int") [builtins fixtures/module.pyi] [case testRedefineLocalWithTypeAnnotation] diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 506e8bfe8ab1..555cef3641f8 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -128,6 +128,120 @@ reveal_type(cast(A, C()).copy()) # N: Revealed type is "__main__.A" [builtins fixtures/bool.pyi] +[case testSelfTypeOverrideCompatibility] +from typing import overload, TypeVar, Generic + +T = TypeVar("T") + +class A(Generic[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + def f(self): ... + +class B2(A[T]): + @overload + def f(self: A[int]) -> int: ... + @overload + def f(self: A[str]) -> str: ... + @overload + def f(self: A[bytes]) -> bytes: ... + def f(self): ... + +class C(A[int]): + def f(self) -> int: ... + +class D(A[str]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + +class E(A[T]): + def f(self) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> int + + +class F(A[bytes]): + # Note there's an argument to be made that this is actually compatible with the supertype + def f(self) -> bytes: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: @overload \ + # N: def f(self) -> str \ + # N: Subclass: \ + # N: def f(self) -> bytes + +class G(A): + def f(self): ... + +class H(A[int]): + def f(self): ... + +class I(A[int]): + def f(*args): ... + +class J(A[int]): + def f(self, arg) -> int: ... 
# E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self) -> int \ + # N: Subclass: \ + # N: def f(self, arg: Any) -> int + +[builtins fixtures/tuple.pyi] + +[case testSelfTypeOverrideCompatibilityTypeVar-xfail] +from typing import overload, TypeVar, Union + +AT = TypeVar("AT", bound="A") + +class A: + @overload + def f(self: AT, x: int) -> AT: ... + @overload + def f(self, x: str) -> None: ... + @overload + def f(self: AT) -> bytes: ... + def f(*a, **kw): ... + +class B(A): + @overload # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None \ + # N: @overload \ + # N: def f(self) -> bytes \ + # N: Subclass: \ + # N: @overload \ + # N: def f(self, x: int) -> B \ + # N: @overload \ + # N: def f(self, x: str) -> None + def f(self, x: int) -> B: ... + @overload + def f(self, x: str) -> None: ... + def f(*a, **kw): ... +[builtins fixtures/dict.pyi] + [case testSelfTypeSuper] from typing import TypeVar, cast @@ -314,7 +428,7 @@ class C: [case testSelfTypeNew] from typing import TypeVar, Type -T = TypeVar('T', bound=A) +T = TypeVar('T', bound='A') class A: def __new__(cls: Type[T]) -> T: return cls() @@ -417,15 +531,15 @@ reveal_type(B().ft()) # N: Revealed type is "Tuple[builtins.int, builtins.int, [builtins fixtures/property.pyi] [case testSelfTypeProperSupertypeAttributeMeta] -from typing import Callable, TypeVar, Type +from typing import Callable, TypeVar, Type, ClassVar T = TypeVar('T') class A(type): @property def g(cls: object) -> int: return 0 @property def gt(cls: T) -> T: return cls - f: Callable[[object], int] - ft: Callable[[T], T] + f: ClassVar[Callable[[object], int]] + ft: ClassVar[Callable[[T], T]] class B(A): pass @@ -678,6 +792,26 @@ reveal_type(f.copy()) # N: Revealed type is "__main__.File" b.copy() # E: Invalid self argument "Bad" to attribute function "copy" with type "Callable[[T], T]" [builtins fixtures/tuple.pyi] +[case testMixinProtocolSuper] +from typing import Protocol + +class Base(Protocol): + def func(self) -> int: + ... + +class TweakFunc: + def func(self: Base) -> int: + return reveal_type(super().func()) # N: Revealed type is "builtins.int" + +class Good: + def func(self) -> int: ... +class C(TweakFunc, Good): pass +C().func() # OK + +class Bad: + def func(self) -> str: ... +class CC(TweakFunc, Bad): pass # E: Definition of "func" in base class "TweakFunc" is incompatible with definition in base class "Bad" + [case testBadClassLevelDecoratorHack] from typing_extensions import Protocol from typing import TypeVar, Any @@ -864,7 +998,7 @@ reveal_type(ab.x) # N: Revealed type is "builtins.int" from typing import Generic, List, Optional, TypeVar, Any Q = TypeVar("Q") -T = TypeVar("T", bound=Super[Any]) +T = TypeVar("T", bound='Super[Any]') class Super(Generic[Q]): @classmethod @@ -1023,7 +1157,7 @@ from typing import Optional, Type, TypeVar, overload, Union Id = int -A = TypeVar("A", bound=AClass) +A = TypeVar("A", bound='AClass') class AClass: @overload @@ -1239,3 +1373,435 @@ class Test(Generic[T]): a: deque[List[T]] # previously this failed with 'Incompatible types in assignment (expression has type "deque[List[List[T]]]", variable has type "deque[List[T]]")' b: deque[List[T]] = a.copy() + +[case testTypingSelfBasic] +from typing import Self, List + +class C: + attr: List[Self] + def meth(self) -> List[Self]: ... 
+ def test(self) -> Self: + if bool(): + return C() # E: Incompatible return value type (got "C", expected "Self") + else: + return self +class D(C): ... + +reveal_type(C.meth) # N: Revealed type is "def [Self <: __main__.C] (self: Self`0) -> builtins.list[Self`0]" +C.attr # E: Access to generic instance variables via class is ambiguous +reveal_type(D().meth()) # N: Revealed type is "builtins.list[__main__.D]" +reveal_type(D().attr) # N: Revealed type is "builtins.list[__main__.D]" + +[case testTypingSelfInvalidLocations] +from typing import Self, Callable + +var: Self # E: Self type is only allowed in annotations within class definition +reveal_type(var) # N: Revealed type is "Any" + +def foo() -> Self: ... # E: Self type is only allowed in annotations within class definition +reveal_type(foo) # N: Revealed type is "def () -> Any" + +bad: Callable[[Self], Self] # E: Self type is only allowed in annotations within class definition +reveal_type(bad) # N: Revealed type is "def (Any) -> Any" + +def func() -> None: + var: Self # E: Self type is only allowed in annotations within class definition + +class C(Self): ... # E: Self type is only allowed in annotations within class definition + +[case testTypingSelfInvalidArgs] +from typing import Self, List + +class C: + x: Self[int] # E: Self type cannot have type arguments + def meth(self) -> List[Self[int]]: # E: Self type cannot have type arguments + ... + +[case testTypingSelfConflict] +from typing import Self, TypeVar, Tuple + +T = TypeVar("T") +class C: + def meth(self: T) -> Tuple[Self, T]: ... # E: Method cannot have explicit self annotation and Self type +reveal_type(C().meth()) # N: Revealed type is "Tuple[, __main__.C]" +[builtins fixtures/property.pyi] + +[case testTypingSelfProperty] +from typing import Self, Tuple +class C: + @property + def attr(self) -> Tuple[Self, ...]: ... +class D(C): ... + +reveal_type(D().attr) # N: Revealed type is "builtins.tuple[__main__.D, ...]" +[builtins fixtures/property.pyi] + +[case testTypingSelfCallableVar] +from typing import Self, Callable + +class C: + x: Callable[[Self], Self] + def meth(self) -> Callable[[Self], Self]: ... +class D(C): ... + +reveal_type(C().x) # N: Revealed type is "def (__main__.C) -> __main__.C" +reveal_type(D().x) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().meth()) # N: Revealed type is "def (__main__.D) -> __main__.D" + +[case testTypingSelfClassMethod] +from typing import Self + +class C: + @classmethod + def meth(cls) -> Self: ... + @staticmethod + def bad() -> Self: ... # E: Static methods cannot use Self type \ + # E: A function returning TypeVar should receive at least one argument containing the same TypeVar \ + # N: Consider using the upper bound "C" instead + +class D(C): ... +reveal_type(D.meth()) # N: Revealed type is "__main__.D" +reveal_type(D.bad()) # N: Revealed type is "" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfOverload] +from typing import Self, overload, Union + +class C: + @overload + def foo(self, other: Self) -> Self: ... + @overload + def foo(self, other: int) -> int: ... + def foo(self, other: Union[Self, int]) -> Union[Self, int]: + return other +class D(C): ... +reveal_type(D().foo) # N: Revealed type is "Overload(def (other: __main__.D) -> __main__.D, def (other: builtins.int) -> builtins.int)" + +[case testTypingSelfNestedInAlias] +from typing import Generic, Self, TypeVar, List, Tuple + +T = TypeVar("T") +Pairs = List[Tuple[T, T]] + +class C(Generic[T]): + def pairs(self) -> Pairs[Self]: ... 
+class D(C[T]): ... +reveal_type(D[int]().pairs()) # N: Revealed type is "builtins.list[Tuple[__main__.D[builtins.int], __main__.D[builtins.int]]]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfOverrideVar] +from typing import Self, TypeVar, Generic + +T = TypeVar("T") +class C(Generic[T]): + x: Self + +class D(C[int]): + x: D +class Bad(C[int]): + x: C[int] # E: Incompatible types in assignment (expression has type "C[int]", base class "C" defined the type as "Bad") + +[case testTypingSelfOverrideVarMulti] +from typing import Self + +class C: + x: Self +class D: + x: C +class E: + x: Good + +class Bad(D, C): # E: Definition of "x" in base class "D" is incompatible with definition in base class "C" + ... +class Good(E, C): + ... + +[case testTypingSelfAlternativeGenericConstructor] +from typing import Self, Generic, TypeVar, Tuple + +T = TypeVar("T") +class C(Generic[T]): + def __init__(self, val: T) -> None: ... + @classmethod + def pair(cls, val: T) -> Tuple[Self, Self]: + return (cls(val), C(val)) # E: Incompatible return value type (got "Tuple[Self, C[T]]", expected "Tuple[Self, Self]") + +class D(C[int]): pass +reveal_type(C.pair(42)) # N: Revealed type is "Tuple[__main__.C[builtins.int], __main__.C[builtins.int]]" +reveal_type(D.pair("no")) # N: Revealed type is "Tuple[__main__.D, __main__.D]" \ + # E: Argument 1 to "pair" of "C" has incompatible type "str"; expected "int" +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfMixedTypeVars] +from typing import Self, TypeVar, Generic, Tuple + +T = TypeVar("T") +S = TypeVar("S") + +class C(Generic[T]): + def meth(self, arg: S) -> Tuple[Self, S, T]: ... + +class D(C[int]): ... + +c: C[int] +d: D +reveal_type(c.meth("test")) # N: Revealed type is "Tuple[__main__.C[builtins.int], builtins.str, builtins.int]" +reveal_type(d.meth("test")) # N: Revealed type is "Tuple[__main__.D, builtins.str, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfRecursiveInit] +from typing import Self + +class C: + def __init__(self, other: Self) -> None: ... +class D(C): ... + +reveal_type(C) # N: Revealed type is "def (other: __main__.C) -> __main__.C" +reveal_type(D) # N: Revealed type is "def (other: __main__.D) -> __main__.D" + +[case testTypingSelfCorrectName] +from typing import Self, List + +class C: + Self = List[C] + def meth(self) -> Self: ... +reveal_type(C.meth) # N: Revealed type is "def (self: __main__.C) -> builtins.list[__main__.C]" + +[case testTypingSelfClassVar] +from typing import Self, ClassVar, Generic, TypeVar + +class C: + DEFAULT: ClassVar[Self] +reveal_type(C.DEFAULT) # N: Revealed type is "__main__.C" + +T = TypeVar("T") +class G(Generic[T]): + BAD: ClassVar[Self] # E: ClassVar cannot contain Self type in generic classes +reveal_type(G.BAD) # N: Revealed type is "__main__.G[Any]" + +[case testTypingSelfMetaClassDisabled] +from typing import Self + +class Meta(type): + def meth(cls) -> Self: ... # E: Self type cannot be used in a metaclass + +[case testTypingSelfNonAnnotationUses] +from typing import Self, List, cast + +class C: + A = List[Self] # E: Self type cannot be used in type alias target + B = cast(Self, ...) + def meth(self) -> A: ... + +class D(C): ... 
+reveal_type(D().meth()) # N: Revealed type is "builtins.list[Any]" +reveal_type(D().B) # N: Revealed type is "__main__.D" + +[case testTypingSelfInternalSafe] +from typing import Self + +class C: + x: Self + def __init__(self, x: C) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfRedundantAllowed] +from typing import Self, Type + +class C: + def f(self: Self) -> Self: + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: + d: DeferAgain + class DeferAgain: ... + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfRedundantWarning] +# mypy: enable-error-code="redundant-self" + +from typing import Self, Type + +class C: + def copy(self: Self) -> Self: # E: Redundant "Self" annotation for the first method argument + d: Defer + class Defer: ... + return self + + @classmethod + def g(cls: Type[Self]) -> Self: # E: Redundant "Self" annotation for the first method argument + d: DeferAgain + class DeferAgain: ... + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAssertType] +from typing import Self, assert_type + +class C: + def foo(self) -> None: + assert_type(self, Self) # E: Expression is of type "C", not "Self" + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + + def bar(self) -> Self: + assert_type(self, Self) # OK + assert_type(C(), Self) # E: Expression is of type "C", not "Self" + return self + +[case testTypingSelfTypeVarClash] +from typing import Self, TypeVar, Tuple + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + def foo(self, x: S) -> Tuple[Self, S]: ... + +reveal_type(C.foo) # N: Revealed type is "def [Self <: __main__.C, S] (self: Self`0, x: S`-1) -> Tuple[Self`0, S`-1]" +reveal_type(C().foo(42)) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +[builtins fixtures/tuple.pyi] + +[case testTypingSelfTypeVarClashAttr] +from typing import Self, TypeVar, Tuple, Callable + +class Defer(This): ... + +S = TypeVar("S") +class C: + def bar(self) -> Self: ... + foo: Callable[[S, Self], Tuple[Self, S]] + +reveal_type(C().foo) # N: Revealed type is "def [S] (S`1, __main__.C) -> Tuple[__main__.C, S`1]" +reveal_type(C().foo(42, C())) # N: Revealed type is "Tuple[__main__.C, builtins.int]" +class This: ... +[builtins fixtures/tuple.pyi] + +[case testTypingSelfAttrOldVsNewStyle] +from typing import Self, TypeVar + +T = TypeVar("T", bound='C') +class C: + x: Self + def foo(self: T) -> T: + return self.x + def bar(self: T) -> T: + self.x = self + return self + def baz(self: Self) -> None: + self.x = self + def bad(self) -> None: + # This is unfortunate, but required by PEP 484 + self.x = self # E: Incompatible types in assignment (expression has type "C", variable has type "Self") + +[case testTypingSelfClashInBodies] +from typing import Self, TypeVar + +T = TypeVar("T") +class C: + def very_bad(self, x: T) -> None: + self.x = x # E: Incompatible types in assignment (expression has type "T", variable has type "Self") + x: Self + def baz(self: Self, x: T) -> None: + y: T = x + +[case testTypingSelfClashUnrelated] +from typing import Self, Generic, TypeVar + +class B: ... + +T = TypeVar("T", bound=B) +class C(Generic[T]): + def __init__(self, val: T) -> None: + self.val = val + def foo(self) -> Self: ... 
+ +def test(x: C[T]) -> T: + reveal_type(x.val) # N: Revealed type is "T`-1" + return x.val + +[case testTypingSelfGenericBound] +from typing import Self, Generic, TypeVar + +T = TypeVar("T") +class C(Generic[T]): + val: T + def foo(self) -> Self: + reveal_type(self.val) # N: Revealed type is "T`1" + return self + +[case testTypingSelfDifferentImport] +import typing as t + +class Foo: + def foo(self) -> t.Self: + return self + @classmethod + def bar(cls) -> t.Self: + return cls() +[builtins fixtures/classmethod.pyi] + +[case testTypingSelfAllowAliasUseInFinalClasses] +from typing import Self, final + +@final +class C: + def meth(self) -> Self: + return C() # OK for final classes + +[case testTypingSelfCallableClassVar] +from typing import Self, ClassVar, Callable, TypeVar + +class C: + f: ClassVar[Callable[[Self], Self]] +class D(C): ... + +reveal_type(D.f) # N: Revealed type is "def (__main__.D) -> __main__.D" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testSelfTypeCallableClassVarOldStyle] +from typing import ClassVar, Callable, TypeVar + +T = TypeVar("T") +class C: + f: ClassVar[Callable[[T], T]] + +class D(C): ... + +reveal_type(D.f) # N: Revealed type is "def [T] (T`-1) -> T`-1" +reveal_type(D().f) # N: Revealed type is "def () -> __main__.D" + +[case testTypingSelfOnSuperTypeVarValues] +from typing import Self, Generic, TypeVar + +T = TypeVar("T", int, str) + +class B: + def copy(self) -> Self: ... +class C(B, Generic[T]): + def copy(self) -> Self: + inst = super().copy() + reveal_type(inst) # N: Revealed type is "Self`0" + return inst + +[case testTypingSelfWithValuesExpansion] +from typing import Self, Generic, TypeVar + +class A: pass +class B: pass +T = TypeVar("T", A, B) + +class C(Generic[T]): + val: T + def foo(self, x: T) -> None: ... 
+ def bar(self, x: T) -> Self: + reveal_type(self.foo) # N: Revealed type is "def (x: __main__.A)" \ + # N: Revealed type is "def (x: __main__.B)" + self.foo(x) + return self + def baz(self: Self, x: T) -> None: + reveal_type(self.val) # N: Revealed type is "__main__.A" \ + # N: Revealed type is "__main__.B" + self.val = x diff --git a/test-data/unit/check-slots.test b/test-data/unit/check-slots.test index 96e4eba3c966..8beb0d8bf3f7 100644 --- a/test-data/unit/check-slots.test +++ b/test-data/unit/check-slots.test @@ -517,3 +517,13 @@ class A: self.b = 2 self.missing = 3 [builtins fixtures/tuple.pyi] + +[case testSlotsWithClassVar] +from typing import ClassVar +class X: + __slots__ = ('a',) + a: int +x = X() +X.a # E: "a" in __slots__ conflicts with class variable access +x.a +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index 9768f43d0bb1..b9551870ddfc 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -140,20 +140,15 @@ main:5: error: Incompatible types in assignment (expression has type "bool", var main:7: error: Incompatible types in assignment (expression has type "bool", variable has type "A") [case testForStatement] +class A: pass a = None # type: A b = None # type: object for a in [A()]: - a = b # Fail + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") else: - a = b # Fail - -class A: pass + a = b # E: Incompatible types in assignment (expression has type "object", variable has type "A") [builtins fixtures/list.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "object", variable has type "A") -main:7: error: Incompatible types in assignment (expression has type "object", variable has type "A") - [case testBreakStatement] import typing while None: @@ -520,15 +515,15 @@ class B: pass main:7: error: Incompatible types in assignment (expression has type "object", variable has type "BaseException") [case testTypeErrorInBlock] +class A: pass +class B: pass while object: x = None # type: A if int(): x = object() # E: Incompatible types in assignment (expression has type "object", variable has type "A") x = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") -class A: pass -class B: pass - [case testTypeErrorInvolvingBaseException] +class A: pass x, a = None, None # type: (BaseException, A) if int(): @@ -541,7 +536,6 @@ if int(): x = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") if int(): x = BaseException() -class A: pass [builtins fixtures/exception.pyi] [case testSimpleTryExcept2] @@ -557,49 +551,38 @@ main:5: error: Incompatible types in assignment (expression has type "object", v [case testBaseClassAsExceptionTypeInExcept] import typing +class Err(BaseException): pass try: pass except Err as e: - e = BaseException() # Fail + e = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") e = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:5: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testMultipleExceptHandlers] import typing +class Err(BaseException): pass try: pass except BaseException as e: pass except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type 
"BaseException", variable has type "Err") f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:7: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptStatement] import typing +class A: pass +class B: pass +class Err(BaseException): pass try: - a = B() # type: A # Fail + a = B() # type: A # E: Incompatible types in assignment (expression has type "B", variable has type "A") except BaseException as e: - e = A() # Fail + e = A() # E: Incompatible types in assignment (expression has type "A", variable has type "BaseException") e = Err() except Err as f: - f = BaseException() # Fail + f = BaseException() # E: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") f = Err() -class A: pass -class B: pass -class Err(BaseException): pass [builtins fixtures/exception.pyi] -[out] -main:3: error: Incompatible types in assignment (expression has type "B", variable has type "A") -main:5: error: Incompatible types in assignment (expression has type "A", variable has type "BaseException") -main:8: error: Incompatible types in assignment (expression has type "BaseException", variable has type "Err") - [case testTryExceptWithinFunction] import typing def f() -> None: @@ -823,7 +806,7 @@ try: pass except E1 as e: pass try: pass except E2 as e: pass -e + 1 # E: Trying to read deleted variable "e" +e + 1 # E: Trying to read deleted variable "e" # E: Name "e" is used before definition e = E1() # E: Assignment to variable "e" outside except: block [builtins fixtures/exception.pyi] @@ -945,6 +928,18 @@ x = f() main:10: note: Revealed type is "builtins.int" main:15: note: Revealed type is "builtins.str" +[case testExceptionVariableWithDisallowAnyExprInDeferredNode] +# flags: --disallow-any-expr +def f() -> int: + x + try: + pass + except Exception as ex: + pass + return 0 +x = f() +[builtins fixtures/exception.pyi] + [case testArbitraryExpressionAsExceptionType] import typing a = BaseException @@ -1073,6 +1068,10 @@ a = A() del a.x, a.y # E: "A" has no attribute "y" [builtins fixtures/tuple.pyi] +[case testDelStmtWithTypeInfo] +class Foo: ... 
+del Foo +Foo + 1 # E: Trying to read deleted variable "Foo" [case testDelStatementWithAssignmentSimple] a = 1 @@ -2036,16 +2035,12 @@ foo = int [case testTypeOfGlobalUsed] import typing +class A(): pass +class B(): pass g = A() def f() -> None: global g - g = B() - -class A(): pass -class B(): pass -[out] -main:5: error: Incompatible types in assignment (expression has type "B", variable has type "A") - + g = B() # E: Incompatible types in assignment (expression has type "B", variable has type "A") [case testTypeOfNonlocalUsed] import typing def f() -> None: @@ -2170,7 +2165,7 @@ N = TypedDict('N', {'x': int}) [out] [case testGlobalWithoutInitialization] - +# flags: --disable-error-code=annotation-unchecked from typing import List def foo() -> None: @@ -2184,3 +2179,36 @@ def foo2(): bar2 = [] # type: List[str] bar2 [builtins fixtures/list.pyi] + +[case testNoteUncheckedAnnotation] +def foo(): + x: int = "no" # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs + y = "no" # type: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs + z: int # N: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs + +[case testGeneratorUnion] +from typing import Generator, Union + +class A: pass +class B: pass + +def foo(x: int) -> Union[Generator[A, None, None], Generator[B, None, None]]: + yield x # E: Incompatible types in "yield" (actual type "int", expected type "Union[A, B]") + +[case testNoCrashOnStarRightHandSide] +x = *(1, 2, 3) # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] + + +[case testTypingExtensionsSuggestion] +from typing import _FutureFeatureFixture + +# This import is only needed in tests. In real life, mypy will always have typing_extensions in its +# build due to its pervasive use in typeshed. This assumption may one day prove False, but when +# that day comes this suggestion will also be less helpful than it is today. +import typing_extensions +[out] +main:1: error: Module "typing" has no attribute "_FutureFeatureFixture" +main:1: note: Use `from typing_extensions import _FutureFeatureFixture` instead +main:1: note: See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-super.test b/test-data/unit/check-super.test index b9f6638d391a..b3379e505be7 100644 --- a/test-data/unit/check-super.test +++ b/test-data/unit/check-super.test @@ -365,7 +365,7 @@ class A: def f(self) -> None: pass class B(A): - def g() -> None: # E: Method must have at least one argument + def g() -> None: # E: Method must have at least one argument. Did you forget the "self" argument? super().f() # E: super() requires one or more positional arguments in enclosing function def h(self) -> None: def a() -> None: @@ -380,3 +380,39 @@ class A: class B(A): def h(self, t: Type[None]) -> None: super(t, self).f # E: Unsupported argument 1 for "super" + +[case testSuperSelfTypeInstanceMethod] +from typing import TypeVar, Type + +T = TypeVar("T", bound="A") + +class A: + def foo(self: T) -> T: ... + +class B(A): + def foo(self: T) -> T: + reveal_type(super().foo()) # N: Revealed type is "T`-1" + return super().foo() + +[case testSuperSelfTypeClassMethod] +from typing import TypeVar, Type + +T = TypeVar("T", bound="A") + +class A: + @classmethod + def foo(cls: Type[T]) -> T: ... 
+ +class B(A): + @classmethod + def foo(cls: Type[T]) -> T: + reveal_type(super().foo()) # N: Revealed type is "T`-1" + return super().foo() +[builtins fixtures/classmethod.pyi] + +[case testWrongSuperOutsideMethodNoCrash] +class B: + x: int +class C1(B): ... +class C2(B): ... +super(C1, C2).x # E: Argument 2 for "super" not an instance of argument 1 diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index c6ae9e808f8a..266bfbf97888 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -164,10 +164,10 @@ class C(B): pass [case testVoidValueInTuple] import typing +def f() -> None: pass + (None, f()) # E: "f" does not return a value (f(), None) # E: "f" does not return a value - -def f() -> None: pass [builtins fixtures/tuple.pyi] @@ -247,15 +247,16 @@ class B: pass [case testAssigningToTupleItems] from typing import Tuple + +class A: pass +class B: pass + t = None # type: Tuple[A, B] n = 0 t[0] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[2] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") t[n] = A() # E: Unsupported target for indexed assignment ("Tuple[A, B]") - -class A: pass -class B: pass [builtins fixtures/tuple.pyi] @@ -532,13 +533,12 @@ if int(): [case testAssignmentToStarFromAny] from typing import Any, cast +class C: pass + a, c = cast(Any, 1), C() p, *q = a c = a c = q - -class C: pass - [case testAssignmentToComplexStar] from typing import List li = None # type: List[int] @@ -572,6 +572,7 @@ class A: pass [case testAssignmentToStarFromTupleInference] from typing import List +class A: pass li = None # type: List[int] la = None # type: List[A] a, *l = A(), A() @@ -579,13 +580,14 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins fixtures/list.pyi] [out] [case testAssignmentToStarFromListInference] from typing import List + +class A: pass + li = None # type: List[int] la = None # type: List[A] a, *l = [A(), A()] @@ -593,8 +595,6 @@ if int(): l = li # E: Incompatible types in assignment (expression has type "List[int]", variable has type "List[A]") if int(): l = la - -class A: pass [builtins fixtures/list.pyi] [out] @@ -710,6 +710,9 @@ class C: pass [case testTupleErrorMessages] +class A: + def __add__(self, x: 'A') -> 'A': pass +def f(x: 'A') -> None: pass a = None # type: A @@ -717,11 +720,6 @@ a = None # type: A a + (a, a) # E: Unsupported operand types for + ("A" and "Tuple[A, A]") f((a, a)) # E: Argument 1 to "f" has incompatible type "Tuple[A, A]"; expected "A" (a, a).foo # E: "Tuple[A, A]" has no attribute "foo" - -def f(x: 'A') -> None: pass - -class A: - def __add__(self, x: 'A') -> 'A': pass [builtins fixtures/tuple.pyi] [case testLargeTuplesInErrorMessages] @@ -776,6 +774,7 @@ class str: pass class bool: pass class type: pass class function: pass +class dict: pass -- For loop over tuple @@ -972,6 +971,17 @@ b = (1, 'x') a = (0, *b, '') [builtins fixtures/tuple.pyi] +[case testUnpackSyntaxError] +*foo # E: Can use starred expression only as assignment target +[builtins fixtures/tuple.pyi] + +[case testUnpackBases] +class A: ... +class B: ... +bases = (A, B) +class C(*bases): ... 
# E: Invalid base class +[builtins fixtures/tuple.pyi] + [case testTupleMeetTupleAny] from typing import Union, Tuple class A: pass @@ -1228,8 +1238,8 @@ y = "" reveal_type(t[x]) # N: Revealed type is "Union[builtins.int, builtins.str]" t[y] # E: No overload variant of "__getitem__" of "tuple" matches argument type "str" \ # N: Possible overload variants: \ - # N: def __getitem__(self, int) -> object \ - # N: def __getitem__(self, slice) -> Tuple[object, ...] + # N: def __getitem__(self, int, /) -> object \ + # N: def __getitem__(self, slice, /) -> Tuple[object, ...] [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index 95fe483ac116..d7cccd2d6ba6 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -197,7 +197,7 @@ Alias = Tuple[int, T] [out] [case testRecursiveAliasesErrors1] - +# flags: --disable-recursive-aliases # Recursive aliases are not supported yet. from typing import Type, Callable, Union @@ -206,7 +206,7 @@ B = Callable[[B], int] # E: Cannot resolve name "B" (possible cyclic definition) C = Type[C] # E: Cannot resolve name "C" (possible cyclic definition) [case testRecursiveAliasesErrors2] - +# flags: --disable-recursive-aliases --disable-error-code=used-before-def # Recursive aliases are not supported yet. from typing import Type, Callable, Union @@ -224,6 +224,7 @@ main:7: error: Cannot resolve name "C" (possible cyclic definition) main:9: note: Revealed type is "Union[Any, builtins.int]" [case testDoubleForwardAlias] +# flags: --disable-error-code=used-before-def from typing import List x: A A = List[B] @@ -233,6 +234,7 @@ reveal_type(x) # N: Revealed type is "builtins.list[builtins.list[builtins.int]] [out] [case testDoubleForwardAliasWithNamedTuple] +# flags: --disable-error-code=used-before-def from typing import List, NamedTuple x: A A = List[B] @@ -243,8 +245,7 @@ reveal_type(x[0].x) # N: Revealed type is "builtins.str" [out] [case testJSONAliasApproximation] - -# Recursive aliases are not supported yet. +# flags: --disable-recursive-aliases from typing import List, Union, Dict x: JSON # E: Cannot resolve name "JSON" (possible cyclic definition) JSON = Union[int, str, List[JSON], Dict[str, JSON]] # E: Cannot resolve name "JSON" (possible cyclic definition) @@ -255,6 +256,7 @@ if isinstance(x, list): [out] [case testForwardRefToTypeVar] +# flags: --disable-error-code=used-before-def from typing import TypeVar, List reveal_type(a) # N: Revealed type is "builtins.list[builtins.int]" @@ -445,7 +447,7 @@ A = Union[None] [case testAliasToClassMethod] from typing import TypeVar, Generic, Union, Type -T = TypeVar('T', bound=C) +T = TypeVar('T', bound='C') MYPY = False if MYPY: @@ -772,7 +774,6 @@ f(string, string) [typing fixtures/typing-medium.pyi] [case testForwardTypeVarRefWithRecursiveFlag] -# flags: --enable-recursive-aliases import c [file a.py] from typing import TypeVar, List, Any, Generic @@ -796,3 +797,234 @@ S = TypeVar("S") class C(Generic[S], List[Defer]): ... class Defer: ... 
[builtins fixtures/list.pyi] + +[case testClassLevelTypeAliasesInUnusualContexts] +from typing import Union +from typing_extensions import TypeAlias + +class Foo: pass + +NormalImplicit = Foo +NormalExplicit: TypeAlias = Foo +SpecialImplicit = Union[int, str] +SpecialExplicit: TypeAlias = Union[int, str] + +class Parent: + NormalImplicit = Foo + NormalExplicit: TypeAlias = Foo + SpecialImplicit = Union[int, str] + SpecialExplicit: TypeAlias = Union[int, str] + +class Child(Parent): pass + +p = Parent() +c = Child() + +# Use type aliases in a runtime context + +reveal_type(NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(SpecialExplicit) # N: Revealed type is "typing._SpecialForm" + +reveal_type(Parent.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(Parent.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(Parent.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Parent.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" + +reveal_type(Child.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(Child.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(Child.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(Child.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" + +reveal_type(p.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(p.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(p.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" + +reveal_type(c.NormalImplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(p.NormalExplicit) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(c.SpecialImplicit) # N: Revealed type is "typing._SpecialForm" +reveal_type(c.SpecialExplicit) # N: Revealed type is "typing._SpecialForm" + +# Use type aliases in a type alias context in a plausible way + +def plausible_top_1() -> NormalImplicit: pass +def plausible_top_2() -> NormalExplicit: pass +def plausible_top_3() -> SpecialImplicit: pass +def plausible_top_4() -> SpecialExplicit: pass +reveal_type(plausible_top_1) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(plausible_top_2) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(plausible_top_3) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" +reveal_type(plausible_top_4) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" + +def plausible_parent_1() -> Parent.NormalImplicit: pass # E: Variable "__main__.Parent.NormalImplicit" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +def plausible_parent_2() -> Parent.NormalExplicit: pass +def plausible_parent_3() -> Parent.SpecialImplicit: pass +def plausible_parent_4() -> Parent.SpecialExplicit: pass +reveal_type(plausible_parent_1) # N: Revealed type is "def () -> Parent.NormalImplicit?" 
+reveal_type(plausible_parent_2) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(plausible_parent_3) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" +reveal_type(plausible_parent_4) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" + +def plausible_child_1() -> Child.NormalImplicit: pass # E: Variable "__main__.Parent.NormalImplicit" is not valid as a type \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +def plausible_child_2() -> Child.NormalExplicit: pass +def plausible_child_3() -> Child.SpecialImplicit: pass +def plausible_child_4() -> Child.SpecialExplicit: pass +reveal_type(plausible_child_1) # N: Revealed type is "def () -> Child.NormalImplicit?" +reveal_type(plausible_child_2) # N: Revealed type is "def () -> __main__.Foo" +reveal_type(plausible_child_3) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" +reveal_type(plausible_child_4) # N: Revealed type is "def () -> Union[builtins.int, builtins.str]" + +# Use type aliases in a type alias context in an implausible way + +def weird_parent_1() -> p.NormalImplicit: pass # E: Name "p.NormalImplicit" is not defined +def weird_parent_2() -> p.NormalExplicit: pass # E: Name "p.NormalExplicit" is not defined +def weird_parent_3() -> p.SpecialImplicit: pass # E: Name "p.SpecialImplicit" is not defined +def weird_parent_4() -> p.SpecialExplicit: pass # E: Name "p.SpecialExplicit" is not defined +reveal_type(weird_parent_1) # N: Revealed type is "def () -> Any" +reveal_type(weird_parent_2) # N: Revealed type is "def () -> Any" +reveal_type(weird_parent_3) # N: Revealed type is "def () -> Any" +reveal_type(weird_parent_4) # N: Revealed type is "def () -> Any" + +def weird_child_1() -> c.NormalImplicit: pass # E: Name "c.NormalImplicit" is not defined +def weird_child_2() -> c.NormalExplicit: pass # E: Name "c.NormalExplicit" is not defined +def weird_child_3() -> c.SpecialImplicit: pass # E: Name "c.SpecialImplicit" is not defined +def weird_child_4() -> c.SpecialExplicit: pass # E: Name "c.SpecialExplicit" is not defined +reveal_type(weird_child_1) # N: Revealed type is "def () -> Any" +reveal_type(weird_child_2) # N: Revealed type is "def () -> Any" +reveal_type(weird_child_3) # N: Revealed type is "def () -> Any" +reveal_type(weird_child_4) # N: Revealed type is "def () -> Any" +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] + +[case testMalformedTypeAliasRuntimeReassignments] +from typing import Union +from typing_extensions import TypeAlias + +class Foo: pass + +NormalImplicit = Foo +NormalExplicit: TypeAlias = Foo +SpecialImplicit = Union[int, str] +SpecialExplicit: TypeAlias = Union[int, str] + +class Parent: + NormalImplicit = Foo + NormalExplicit: TypeAlias = Foo + SpecialImplicit = Union[int, str] + SpecialExplicit: TypeAlias = Union[int, str] + +class Child(Parent): pass + +p = Parent() +c = Child() + +NormalImplicit = 4 # E: Cannot assign multiple types to name "NormalImplicit" without an explicit "Type[...]" annotation \ + # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +NormalExplicit = 4 # E: Cannot assign multiple types to name "NormalExplicit" without an explicit "Type[...]" annotation \ + # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +SpecialImplicit = 4 # E: Cannot assign multiple types to name "SpecialImplicit" without an explicit "Type[...]" annotation +SpecialExplicit = 4 # E: Cannot assign 
multiple types to name "SpecialExplicit" without an explicit "Type[...]" annotation + +Parent.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +Parent.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +Parent.SpecialImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") +Parent.SpecialExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "") + +Child.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +Child.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +Child.SpecialImplicit = 4 +Child.SpecialExplicit = 4 + +p.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +p.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +p.SpecialImplicit = 4 +p.SpecialExplicit = 4 + +c.NormalImplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +c.NormalExplicit = 4 # E: Incompatible types in assignment (expression has type "int", variable has type "Type[Foo]") +c.SpecialImplicit = 4 +c.SpecialExplicit = 4 +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-medium.pyi] + +[case testNewStyleUnionInTypeAliasWithMalformedInstance] +# flags: --python-version 3.10 +from typing import List + +A = List[int, str] | int # E: "list" expects 1 type argument, but 2 given +B = int | list[int, str] # E: "list" expects 1 type argument, but 2 given +a: A +b: B +reveal_type(a) # N: Revealed type is "Union[builtins.list[Any], builtins.int]" +reveal_type(b) # N: Revealed type is "Union[builtins.int, builtins.list[Any]]" + +[case testValidTypeAliasValues] +from typing import TypeVar, Generic, List + +T = TypeVar("T", int, str) +S = TypeVar("S", int, bytes) + +class C(Generic[T]): ... +class D(C[S]): ... # E: Invalid type argument value for "C" + +U = TypeVar("U") +A = List[C[U]] +x: A[bytes] # E: Value of type variable "T" of "C" cannot be "bytes" + +V = TypeVar("V", bound=int) +class E(Generic[V]): ... +B = List[E[U]] +y: B[str] # E: Type argument "str" of "E" must be a subtype of "int" + +[case testValidTypeAliasValuesMoreRestrictive] +from typing import TypeVar, Generic, List + +T = TypeVar("T") +S = TypeVar("S", int, str) +U = TypeVar("U", bound=int) + +class C(Generic[T]): ... + +A = List[C[S]] +x: A[int] +x_bad: A[bytes] # E: Value of type variable "S" of "A" cannot be "bytes" + +B = List[C[U]] +y: B[int] +y_bad: B[str] # E: Type argument "str" of "B" must be a subtype of "int" + +[case testTupleWithDifferentArgsPy38] +# flags: --python-version 3.8 +NotYet1 = tuple[float] # E: "tuple" is not subscriptable +NotYet2 = tuple[float, float] # E: "tuple" is not subscriptable +NotYet3 = tuple[float, ...] # E: Unexpected "..." \ + # E: "tuple" is not subscriptable +NotYet4 = tuple[float, float, ...] # E: Unexpected "..." \ + # E: "tuple" is not subscriptable +[builtins fixtures/tuple.pyi] + +[case testTupleWithDifferentArgsStub] +# https://github.com/python/mypy/issues/11098 +import tup + +[file tup.pyi] +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] 
| str +Correct4 = tuple[float, str] | str +Correct5 = tuple[int, str] +Correct6 = tuple[int, ...] + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +# Wrong: + +WrongTypeElement = str | tuple[float, 1] # E: Invalid type: try using Literal[1] instead? +WrongEllipsis = str | tuple[float, float, ...] # E: Unexpected "..." +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-type-promotion.test b/test-data/unit/check-type-promotion.test index f477a9f2b390..e66153726e7d 100644 --- a/test-data/unit/check-type-promotion.test +++ b/test-data/unit/check-type-promotion.test @@ -54,3 +54,136 @@ def f(x: Union[SupportsFloat, T]) -> Union[SupportsFloat, T]: pass f(0) # should not crash [builtins fixtures/primitives.pyi] [out] + +[case testIntersectionUsingPromotion1] +# flags: --warn-unreachable +from typing import Union + +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +reveal_type(x) # N: Revealed type is "builtins.complex" + +y: Union[int, float] +if isinstance(y, float): + reveal_type(y) # N: Revealed type is "builtins.float" +else: + reveal_type(y) # N: Revealed type is "builtins.int" + +reveal_type(y) # N: Revealed type is "Union[builtins.int, builtins.float]" + +if isinstance(y, int): + reveal_type(y) # N: Revealed type is "builtins.int" +else: + reveal_type(y) # N: Revealed type is "builtins.float" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion2] +# flags: --warn-unreachable +x: complex = 1 +reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" + +# Note we make type precise, since type promotions are involved +reveal_type(x) # N: Revealed type is "Union[builtins.complex, builtins.int, builtins.float]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion3] +# flags: --warn-unreachable +x: object +if isinstance(x, int) and isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex) and isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion4] +# flags: --warn-unreachable +x: object +if isinstance(x, int): + if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.int" +if isinstance(x, complex): + if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" + else: + reveal_type(x) # N: Revealed type is "builtins.complex" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion5] +# flags: --warn-unreachable +from typing import Union + +x: Union[float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion6] +# flags: --warn-unreachable +from typing import Union + +x: Union[str, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type 
is "Union[builtins.str, builtins.complex]" +reveal_type(x) # N: Revealed type is "Union[builtins.str, builtins.int, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion7] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, int): + reveal_type(x) # N: Revealed type is "builtins.int" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" + +if isinstance(x, float): + reveal_type(x) # N: Revealed type is "builtins.float" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" + +if isinstance(x, complex): + reveal_type(x) # N: Revealed type is "builtins.complex" +else: + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" + +reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float, builtins.complex]" +[builtins fixtures/primitives.pyi] + +[case testIntersectionUsingPromotion8] +# flags: --warn-unreachable +from typing import Union + +x: Union[int, float, complex] +if isinstance(x, (int, float)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.float]" +else: + reveal_type(x) # N: Revealed type is "builtins.complex" +if isinstance(x, (int, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.float" +if isinstance(x, (float, complex)): + reveal_type(x) # N: Revealed type is "Union[builtins.float, builtins.complex]" +else: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-typeddict.test b/test-data/unit/check-typeddict.test index 49c1fe1c9279..e3d6188b643b 100644 --- a/test-data/unit/check-typeddict.test +++ b/test-data/unit/check-typeddict.test @@ -221,6 +221,19 @@ reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {'y': builtins.in [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testCannotCreateTypedDictWithDecoratedFunction] +# flags: --disallow-any-expr +# https://github.com/python/mypy/issues/13066 +from typing import TypedDict +class D(TypedDict): + @classmethod # E: Invalid statement in TypedDict definition; expected "field_name: field_type" + def m(self) -> D: + pass +d = D() +reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {})" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testTypedDictWithClassmethodAlternativeConstructorDoesNotCrash] # https://github.com/python/mypy/issues/5653 from typing import TypedDict @@ -478,9 +491,9 @@ fun2(a) # Error main:17: error: Argument 1 to "fun2" has incompatible type "A"; expected "StrIntMap" main:17: note: Following member(s) of "A" have conflicts: main:17: note: Expected: -main:17: note: def __getitem__(self, str) -> int +main:17: note: def __getitem__(self, str, /) -> int main:17: note: Got: -main:17: note: def __getitem__(self, str) -> object +main:17: note: def __getitem__(self, str, /) -> object [case testTypedDictWithSimpleProtocolInference] from typing_extensions import Protocol, TypedDict @@ -880,17 +893,27 @@ B = TypedDict('B', {'@type': Literal['b-type'], 'b': int}) c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} reveal_type(c) # N: Revealed type is "Union[TypedDict('__main__.A', {'@type': 
Literal['a-type'], 'a': builtins.str}), TypedDict('__main__.B', {'@type': Literal['b-type'], 'b': builtins.int})]" -[builtins fixtures/tuple.pyi] +[builtins fixtures/dict.pyi] -[case testTypedDictUnionAmbiguousCase] +[case testTypedDictUnionAmbiguousCaseBothMatch] from typing import Union, Mapping, Any, cast from typing_extensions import TypedDict, Literal -A = TypedDict('A', {'@type': Literal['a-type'], 'a': str}) -B = TypedDict('B', {'@type': Literal['a-type'], 'a': str}) +A = TypedDict('A', {'@type': Literal['a-type'], 'value': str}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': str}) -c: Union[A, B] = {'@type': 'a-type', 'a': 'Test'} # E: Type of TypedDict is ambiguous, could be any of ("A", "B") \ - # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} +[builtins fixtures/dict.pyi] + +[case testTypedDictUnionAmbiguousCaseNoMatch] +from typing import Union, Mapping, Any, cast +from typing_extensions import TypedDict, Literal + +A = TypedDict('A', {'@type': Literal['a-type'], 'value': int}) +B = TypedDict('B', {'@type': Literal['b-type'], 'value': int}) + +c: Union[A, B] = {'@type': 'a-type', 'value': 'Test'} # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Incompatible types in assignment (expression has type "Dict[str, str]", variable has type "Union[A, B]") [builtins fixtures/dict.pyi] -- Use dict literals @@ -1061,8 +1084,8 @@ reveal_type(d) \ [case testTypedDictWithInvalidTotalArgument] from mypy_extensions import TypedDict -A = TypedDict('A', {'x': int}, total=0) # E: TypedDict() "total" argument must be True or False -B = TypedDict('B', {'x': int}, total=bool) # E: TypedDict() "total" argument must be True or False +A = TypedDict('A', {'x': int}, total=0) # E: "total" argument must be a True or False literal +B = TypedDict('B', {'x': int}, total=bool) # E: "total" argument must be a True or False literal C = TypedDict('C', {'x': int}, x=False) # E: Unexpected keyword argument "x" for "TypedDict" D = TypedDict('D', {'x': int}, False) # E: Unexpected arguments to TypedDict() [builtins fixtures/dict.pyi] @@ -1156,12 +1179,12 @@ reveal_type(d) # N: Revealed type is "TypedDict('__main__.D', {'x'?: builtins.in [case testTypedDictClassWithInvalidTotalArgument] from mypy_extensions import TypedDict -class D(TypedDict, total=1): # E: Value of "total" must be True or False +class D(TypedDict, total=1): # E: "total" argument must be a True or False literal x: int -class E(TypedDict, total=bool): # E: Value of "total" must be True or False +class E(TypedDict, total=bool): # E: "total" argument must be a True or False literal x: int -class F(TypedDict, total=xyz): # E: Value of "total" must be True or False \ - # E: Name "xyz" is not defined +class F(TypedDict, total=xyz): # E: Name "xyz" is not defined \ + # E: "total" argument must be a True or False literal x: int [builtins fixtures/dict.pyi] @@ -1443,7 +1466,7 @@ reveal_type(x['a']['b']) # N: Revealed type is "builtins.int" [case testSelfRecursiveTypedDictInheriting] from mypy_extensions import TypedDict - +# flags: --disable-recursive-aliases class MovieBase(TypedDict): name: str year: int @@ -1457,7 +1480,7 @@ reveal_type(m['director']['name']) # N: Revealed type is "Any" [out] [case testSubclassOfRecursiveTypedDict] - +# flags: --disable-recursive-aliases from typing import List from mypy_extensions import TypedDict @@ -1481,7 +1504,7 @@ class G(Generic[T]): yb: G[int] # E: Type 
argument "int" of "G" must be a subtype of "M" yg: G[M] -z: int = G[M]().x['x'] +z: int = G[M]().x['x'] # type: ignore[used-before-def] class M(TypedDict): x: int @@ -2007,21 +2030,226 @@ v = {union: 2} # E: Expected TypedDict key to be string literal num2: Literal['num'] v = {num2: 2} bad2: Literal['bad'] -v = {bad2: 2} # E: Extra key "bad" for TypedDict "Value" +v = {bad2: 2} # E: Missing key "num" for TypedDict "Value" \ + # E: Extra key "bad" for TypedDict "Value" + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_unionWithList] +from __future__ import annotations +from typing import assert_type, TypedDict, Union +from typing_extensions import final + +@final +class D(TypedDict): + foo: int + + +d_or_list: D | list[str] + +if 'foo' in d_or_list: + assert_type(d_or_list, Union[D, list[str]]) +elif 'bar' in d_or_list: + assert_type(d_or_list, list[str]) +else: + assert_type(d_or_list, list[str]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_total] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, TypeVar, Union +from typing_extensions import final + +@final +class D1(TypedDict): + foo: int + + +@final +class D2(TypedDict): + bar: int + + +d: D1 | D2 + +if 'foo' in d: + assert_type(d, D1) +else: + assert_type(d, D2) + +foo_or_bar: Literal['foo', 'bar'] +if foo_or_bar in d: + assert_type(d, Union[D1, D2]) +else: + assert_type(d, Union[D1, D2]) + +foo_or_invalid: Literal['foo', 'invalid'] +if foo_or_invalid in d: + assert_type(d, D1) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) +else: + assert_type(d, Union[D1, D2]) + # won't narrow 'foo_or_invalid' + assert_type(foo_or_invalid, Literal['foo', 'invalid']) + +TD = TypeVar('TD', D1, D2) + +def f(arg: TD) -> None: + value: int + if 'foo' in arg: + assert_type(arg['foo'], int) + else: + assert_type(arg['bar'], int) + + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_final] +# flags: --warn-unreachable +from __future__ import annotations +from typing import assert_type, TypedDict, Union +from typing_extensions import final + +@final +class DFinal(TypedDict): + foo: int + + +class DNotFinal(TypedDict): + bar: int + + +d_not_final: DNotFinal + +if 'bar' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + spam = 'ham' # E: Statement is unreachable + +if 'spam' in d_not_final: + assert_type(d_not_final, DNotFinal) +else: + assert_type(d_not_final, DNotFinal) + +d_final: DFinal + +if 'spam' in d_final: + spam = 'ham' # E: Statement is unreachable +else: + assert_type(d_final, DFinal) + +d_union: DFinal | DNotFinal + +if 'foo' in d_union: + assert_type(d_union, Union[DFinal, DNotFinal]) +else: + assert_type(d_union, DNotFinal) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testOperatorContainsNarrowsTypedDicts_partialThroughTotalFalse] +from __future__ import annotations +from typing import assert_type, Literal, TypedDict, Union +from typing_extensions import final + +@final +class DTotal(TypedDict): + required_key: int + + +@final +class DNotTotal(TypedDict, total=False): + optional_key: int + + +d: DTotal | DNotTotal + +if 'required_key' in d: + assert_type(d, DTotal) +else: + assert_type(d, DNotTotal) + +if 'optional_key' in d: + assert_type(d, DNotTotal) +else: + assert_type(d, Union[DTotal, DNotTotal]) + +key: 
Literal['optional_key', 'required_key'] +if key in d: + assert_type(d, Union[DTotal, DNotTotal]) +else: + assert_type(d, Union[DTotal, DNotTotal]) [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] -[case testCannotUseFinalDecoratorWithTypedDict] +[case testOperatorContainsNarrowsTypedDicts_partialThroughNotRequired] +from __future__ import annotations +from typing import assert_type, Required, NotRequired, TypedDict, Union +from typing_extensions import final + +@final +class D1(TypedDict): + required_key: Required[int] + optional_key: NotRequired[int] + + +@final +class D2(TypedDict): + abc: int + xyz: int + + +d: D1 | D2 + +if 'required_key' in d: + assert_type(d, D1) +else: + assert_type(d, D2) + +if 'optional_key' in d: + assert_type(d, D1) +else: + assert_type(d, Union[D1, D2]) + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testCannotSubclassFinalTypedDict] from typing import TypedDict from typing_extensions import final -@final # E: @final cannot be used with TypedDict +@final class DummyTypedDict(TypedDict): int_val: int float_val: float str_val: str +class SubType(DummyTypedDict): # E: Cannot inherit from final class "DummyTypedDict" + pass + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testCannotSubclassFinalTypedDictWithForwardDeclarations] +from typing import TypedDict +from typing_extensions import final + +@final +class DummyTypedDict(TypedDict): + forward_declared: "ForwardDeclared" + +class SubType(DummyTypedDict): # E: Cannot inherit from final class "DummyTypedDict" + pass + +class ForwardDeclared: pass + [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] @@ -2062,7 +2290,7 @@ reveal_type(foo['baz']) # N: Revealed type is "builtins.list[Any]" from mypy_extensions import TypedDict from typing import Any, List -Foo = TypedDict('Foo', {'bar': Bar, 'baz': Bar}) +Foo = TypedDict('Foo', {'bar': 'Bar', 'baz': 'Bar'}) Bar = List[Any] @@ -2526,12 +2754,28 @@ Alias(key=0, value=0) # E: Missing type parameters for generic type "Alias" \ [builtins fixtures/dict.pyi] [typing fixtures/typing-typeddict.pyi] +[case testGenericTypedDictMultipleGenerics] +# See https://github.com/python/mypy/issues/13755 +from typing import Generic, TypeVar, TypedDict + +T = TypeVar("T") +Foo = TypedDict("Foo", {"bar": T}) +class Stack(Generic[T]): pass + +a = Foo[str] +b = Foo[int] +reveal_type(a) # N: Revealed type is "def (*, bar: builtins.str) -> TypedDict('__main__.Foo', {'bar': builtins.str})" +reveal_type(b) # N: Revealed type is "def (*, bar: builtins.int) -> TypedDict('__main__.Foo', {'bar': builtins.int})" + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + [case testGenericTypedDictCallSyntax] from typing import TypedDict, TypeVar T = TypeVar("T") TD = TypedDict("TD", {"key": int, "value": T}) -reveal_type(TD) # N: Revealed type is "def [T] (*, key: builtins.int, value: T`-1) -> TypedDict('__main__.TD', {'key': builtins.int, 'value': T`-1})" +reveal_type(TD) # N: Revealed type is "def [T] (*, key: builtins.int, value: T`1) -> TypedDict('__main__.TD', {'key': builtins.int, 'value': T`1})" tds: TD[str] reveal_type(tds) # N: Revealed type is "TypedDict('__main__.TD', {'key': builtins.int, 'value': builtins.str})" @@ -2542,3 +2786,90 @@ TD[str](key=0, value=0) # E: Incompatible types (expression has type "int", Typ TD[str]({"key": 0, "value": 0}) # E: Incompatible types (expression has type "int", TypedDict item "value" has type "str") [builtins fixtures/dict.pyi] [typing 
fixtures/typing-typeddict.pyi] + +[case testTypedDictSelfItemNotAllowed] +from typing import Self, TypedDict, Optional + +class TD(TypedDict): + val: int + next: Optional[Self] # E: Self type cannot be used in TypedDict item type +TDC = TypedDict("TDC", {"val": int, "next": Optional[Self]}) # E: Self type cannot be used in TypedDict item type + +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsInferred] +from typing import TypedDict, Dict + +D = TypedDict("D", {"foo": int}, total=False) + +def f(d: Dict[str, D]) -> None: + args = d["a"] + args.update(d.get("b", {})) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDeclared] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + name: str +class B(TypedDict, total=False): + name: str + +def foo(data: Union[A, B]) -> None: ... +foo({"name": "Robert"}) # OK +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsEmpty] +from typing import TypedDict, Union + +class Foo(TypedDict, total=False): + foo: str +class Bar(TypedDict, total=False): + bar: str + +def foo(body: Union[Foo, Bar] = {}) -> None: # OK + ... +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsDistinct] +from typing import TypedDict, Union, Literal + +class A(TypedDict): + type: Literal['a'] + value: bool +class B(TypedDict): + type: Literal['b'] + value: str + +Response = Union[A, B] +def method(message: Response) -> None: ... + +method({'type': 'a', 'value': True}) # OK +method({'type': 'b', 'value': 'abc'}) # OK +method({'type': 'a', 'value': 'abc'}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "method" has incompatible type "Dict[str, str]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] + +[case testUnionOfEquivalentTypedDictsNested] +from typing import TypedDict, Union + +class A(TypedDict, total=False): + foo: C +class B(TypedDict, total=False): + foo: D +class C(TypedDict, total=False): + c: str +class D(TypedDict, total=False): + d: str + +def foo(data: Union[A, B]) -> None: ... +foo({"foo": {"c": "foo"}}) # OK +foo({"foo": {"e": "foo"}}) # E: Type of TypedDict is ambiguous, none of ("A", "B") matches cleanly \ + # E: Argument 1 to "foo" has incompatible type "Dict[str, Dict[str, str]]"; expected "Union[A, B]" +[builtins fixtures/dict.pyi] +[typing fixtures/typing-typeddict.pyi] diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test index cf72e7033087..39bcb091f09e 100644 --- a/test-data/unit/check-typeguard.test +++ b/test-data/unit/check-typeguard.test @@ -37,8 +37,8 @@ reveal_type(foo) # N: Revealed type is "def (a: builtins.object) -> TypeGuard[b [case testTypeGuardCallArgsNone] from typing_extensions import TypeGuard class Point: pass -# TODO: error on the 'def' line (insufficient args for type guard) -def is_point() -> TypeGuard[Point]: pass + +def is_point() -> TypeGuard[Point]: pass # E: TypeGuard functions must have a positional argument def main(a: object) -> None: if is_point(): reveal_type(a) # N: Revealed type is "builtins.object" @@ -227,13 +227,13 @@ def main(a: object) -> None: from typing_extensions import TypeGuard def is_float(a: object, b: object = 0) -> TypeGuard[float]: pass def main1(a: object) -> None: - # This is debatable -- should we support these cases? 
+ if is_float(a=a, b=1): + reveal_type(a) # N: Revealed type is "builtins.float" - if is_float(a=a, b=1): # E: Type guard requires positional argument - reveal_type(a) # N: Revealed type is "builtins.object" + if is_float(b=1, a=a): + reveal_type(a) # N: Revealed type is "builtins.float" - if is_float(b=1, a=a): # E: Type guard requires positional argument - reveal_type(a) # N: Revealed type is "builtins.object" + # This is debatable -- should we support these cases? ta = (a,) if is_float(*ta): # E: Type guard requires positional argument @@ -597,3 +597,77 @@ def func(names: Tuple[str, ...]): if is_two_element_tuple(names): reveal_type(names) # N: Revealed type is "Tuple[builtins.str, builtins.str]" [builtins fixtures/tuple.pyi] + +[case testTypeGuardErroneousDefinitionFails] +from typing_extensions import TypeGuard + +class Z: + def typeguard(self, *, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument + ... + +def bad_typeguard(*, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument + ... +[builtins fixtures/tuple.pyi] + +[case testTypeGuardWithKeywordArg] +from typing_extensions import TypeGuard + +class Z: + def typeguard(self, x: object) -> TypeGuard[int]: + ... + +def typeguard(x: object) -> TypeGuard[int]: + ... + +n: object +if typeguard(x=n): + reveal_type(n) # N: Revealed type is "builtins.int" + +if Z().typeguard(x=n): + reveal_type(n) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + +[case testStaticMethodTypeGuard] +from typing_extensions import TypeGuard + +class Y: + @staticmethod + def typeguard(h: object) -> TypeGuard[int]: + ... + +x: object +if Y().typeguard(x): + reveal_type(x) # N: Revealed type is "builtins.int" +if Y.typeguard(x): + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] +[builtins fixtures/classmethod.pyi] + +[case testTypeGuardKwargFollowingThroughOverloaded] +from typing import overload, Union +from typing_extensions import TypeGuard + +@overload +def typeguard(x: object, y: str) -> TypeGuard[str]: + ... + +@overload +def typeguard(x: object, y: int) -> TypeGuard[int]: + ... + +def typeguard(x: object, y: Union[int, str]) -> Union[TypeGuard[int], TypeGuard[str]]: + ... 
+ +x: object +if typeguard(x=x, y=42): + reveal_type(x) # N: Revealed type is "builtins.int" + +if typeguard(y=42, x=x): + reveal_type(x) # N: Revealed type is "builtins.int" + +if typeguard(x=x, y="42"): + reveal_type(x) # N: Revealed type is "builtins.str" + +if typeguard(y="42", x=x): + reveal_type(x) # N: Revealed type is "builtins.str" +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 193d1b0a58ba..9afe709ed19b 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -95,19 +95,19 @@ reveal_type(h(args)) # N: Revealed type is "Tuple[builtins.str, builtins.str, b [builtins fixtures/tuple.pyi] [case testTypeVarTupleGenericClassDefn] -from typing import Generic, TypeVar, Tuple -from typing_extensions import TypeVarTuple +from typing import Generic, TypeVar, Tuple, Union +from typing_extensions import TypeVarTuple, Unpack T = TypeVar("T") Ts = TypeVarTuple("Ts") -class Variadic(Generic[Ts]): +class Variadic(Generic[Unpack[Ts]]): pass -class Mixed1(Generic[T, Ts]): +class Mixed1(Generic[T, Unpack[Ts]]): pass -class Mixed2(Generic[Ts, T]): +class Mixed2(Generic[Unpack[Ts], T]): pass variadic: Variadic[int, str] @@ -120,6 +120,13 @@ empty: Variadic[()] # TODO: fix pretty printer to be better. reveal_type(empty) # N: Revealed type is "__main__.Variadic" +bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]] # E: More than one Unpack in a type is not allowed +reveal_type(bad) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]" + +# TODO: This is tricky to fix because we need typeanal to know whether the current +# location is valid for an Unpack or not. +# bad2: Unpack[Tuple[int, ...]] + m1: Mixed1[int, str, bool] reveal_type(m1) # N: Revealed type is "__main__.Mixed1[builtins.int, builtins.str, builtins.bool]" @@ -133,7 +140,7 @@ Ts = TypeVarTuple("Ts") T = TypeVar("T") S = TypeVar("S") -class Variadic(Generic[T, Ts, S]): +class Variadic(Generic[T, Unpack[Ts], S]): pass def foo(t: Variadic[int, Unpack[Ts], object]) -> Tuple[int, Unpack[Ts]]: @@ -152,7 +159,7 @@ Ts = TypeVarTuple("Ts") T = TypeVar("T") S = TypeVar("S") -class Variadic(Generic[T, Ts, S]): +class Variadic(Generic[T, Unpack[Ts], S]): def __init__(self, t: Tuple[Unpack[Ts]]) -> None: ... @@ -170,3 +177,338 @@ from typing_extensions import TypeVarTuple Ts = TypeVarTuple("Ts") B = Ts # E: Type variable "__main__.Ts" is invalid as target for type alias [builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646ArrayExample] +from typing import Generic, Tuple, TypeVar, Protocol, NewType +from typing_extensions import TypeVarTuple, Unpack + +Shape = TypeVarTuple('Shape') + +Height = NewType('Height', int) +Width = NewType('Width', int) + +T_co = TypeVar("T_co", covariant=True) +T = TypeVar("T") + +class SupportsAbs(Protocol[T_co]): + def __abs__(self) -> T_co: pass + +def abs(a: SupportsAbs[T]) -> T: + ... + +class Array(Generic[Unpack[Shape]]): + def __init__(self, shape: Tuple[Unpack[Shape]]): + self._shape: Tuple[Unpack[Shape]] = shape + + def get_shape(self) -> Tuple[Unpack[Shape]]: + return self._shape + + def __abs__(self) -> Array[Unpack[Shape]]: ... + + def __add__(self, other: Array[Unpack[Shape]]) -> Array[Unpack[Shape]]: ... 
+ +shape = (Height(480), Width(640)) +x: Array[Height, Width] = Array(shape) +reveal_type(abs(x)) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" +reveal_type(x + x) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" + +[builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646ArrayExampleWithDType] +from typing import Generic, Tuple, TypeVar, Protocol, NewType +from typing_extensions import TypeVarTuple, Unpack + +DType = TypeVar("DType") +Shape = TypeVarTuple('Shape') + +Height = NewType('Height', int) +Width = NewType('Width', int) + +T_co = TypeVar("T_co", covariant=True) +T = TypeVar("T") + +class SupportsAbs(Protocol[T_co]): + def __abs__(self) -> T_co: pass + +def abs(a: SupportsAbs[T]) -> T: + ... + +class Array(Generic[DType, Unpack[Shape]]): + def __init__(self, shape: Tuple[Unpack[Shape]]): + self._shape: Tuple[Unpack[Shape]] = shape + + def get_shape(self) -> Tuple[Unpack[Shape]]: + return self._shape + + def __abs__(self) -> Array[DType, Unpack[Shape]]: ... + + def __add__(self, other: Array[DType, Unpack[Shape]]) -> Array[DType, Unpack[Shape]]: ... + +shape = (Height(480), Width(640)) +x: Array[float, Height, Width] = Array(shape) +reveal_type(abs(x)) # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]" +reveal_type(x + x) # N: Revealed type is "__main__.Array[builtins.float, __main__.Height, __main__.Width]" + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646ArrayExampleInfer] +from typing import Generic, Tuple, TypeVar, NewType +from typing_extensions import TypeVarTuple, Unpack + +Shape = TypeVarTuple('Shape') + +Height = NewType('Height', int) +Width = NewType('Width', int) + +class Array(Generic[Unpack[Shape]]): + pass + +x: Array[float, Height, Width] = Array() +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeConcatenation] +from typing import Generic, TypeVar, NewType +from typing_extensions import TypeVarTuple, Unpack + +Shape = TypeVarTuple('Shape') + +Channels = NewType("Channels", int) +Batch = NewType("Batch", int) +Height = NewType('Height', int) +Width = NewType('Width', int) + +class Array(Generic[Unpack[Shape]]): + pass + + +def add_batch_axis(x: Array[Unpack[Shape]]) -> Array[Batch, Unpack[Shape]]: ... +def del_batch_axis(x: Array[Batch, Unpack[Shape]]) -> Array[Unpack[Shape]]: ... +def add_batch_channels( + x: Array[Unpack[Shape]] +) -> Array[Batch, Unpack[Shape], Channels]: ... + +a: Array[Height, Width] +b = add_batch_axis(a) +reveal_type(b) # N: Revealed type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width]" +c = del_batch_axis(b) +reveal_type(c) # N: Revealed type is "__main__.Array[__main__.Height, __main__.Width]" +d = add_batch_channels(a) +reveal_type(d) # N: Revealed type is "__main__.Array[__main__.Batch, __main__.Height, __main__.Width, __main__.Channels]" + +[builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646TypeVarConcatenation] +from typing import Generic, TypeVar, NewType, Tuple +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar('T') +Ts = TypeVarTuple('Ts') + +def prefix_tuple( + x: T, + y: Tuple[Unpack[Ts]], +) -> Tuple[T, Unpack[Ts]]: + ... 
+ +z = prefix_tuple(x=0, y=(True, 'a')) +reveal_type(z) # N: Revealed type is "Tuple[builtins.int, builtins.bool, builtins.str]" +[builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646TypeVarTupleUnpacking] +from typing import Generic, TypeVar, NewType, Any, Tuple +from typing_extensions import TypeVarTuple, Unpack + +Shape = TypeVarTuple('Shape') + +Channels = NewType("Channels", int) +Batch = NewType("Batch", int) +Height = NewType('Height', int) +Width = NewType('Width', int) + +class Array(Generic[Unpack[Shape]]): + pass + +def process_batch_channels( + x: Array[Batch, Unpack[Tuple[Any, ...]], Channels] +) -> None: + ... + +x: Array[Batch, Height, Width, Channels] +process_batch_channels(x) +y: Array[Batch, Channels] +process_batch_channels(y) +z: Array[Batch] +process_batch_channels(z) # E: Argument 1 to "process_batch_channels" has incompatible type "Array[Batch]"; expected "Array[Batch, Unpack[Tuple[Any, ...]], Channels]" + +u: Array[Unpack[Tuple[Any, ...]]] + +def expect_variadic_array( + x: Array[Batch, Unpack[Shape]] +) -> None: + ... + +def expect_variadic_array_2( + x: Array[Batch, Height, Width, Channels] +) -> None: + ... + +expect_variadic_array(u) +expect_variadic_array_2(u) + +Ts = TypeVarTuple("Ts") +Ts2 = TypeVarTuple("Ts2") + +def bad(x: Tuple[int, Unpack[Ts], str, Unpack[Ts2]]) -> None: # E: More than one Unpack in a type is not allowed + + ... +reveal_type(bad) # N: Revealed type is "def [Ts, Ts2] (x: Tuple[builtins.int, Unpack[Ts`-1], builtins.str])" + +def bad2(x: Tuple[int, Unpack[Tuple[int, ...]], str, Unpack[Tuple[str, ...]]]) -> None: # E: More than one Unpack in a type is not allowed + ... +reveal_type(bad2) # N: Revealed type is "def (x: Tuple[builtins.int, Unpack[builtins.tuple[builtins.int, ...]], builtins.str])" + + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsBasic] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +# TODO: add less trivial tests with prefix/suffix etc. +# TODO: add tests that call with a type var tuple instead of just args. 
+def args_to_tuple(*args: Unpack[Ts]) -> Tuple[Unpack[Ts]]: + reveal_type(args) # N: Revealed type is "Tuple[Unpack[Ts`-1]]" + return args + +reveal_type(args_to_tuple(1, 'a')) # N: Revealed type is "Tuple[Literal[1]?, Literal['a']?]" + +[builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646TypeVarStarArgs] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +def with_prefix_suffix(*args: Unpack[Tuple[bool, str, Unpack[Ts], int]]) -> Tuple[bool, str, Unpack[Ts], int]: + reveal_type(args) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + return args + +reveal_type(with_prefix_suffix(True, "bar", "foo", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" +reveal_type(with_prefix_suffix(True, "bar", 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.int]" + +with_prefix_suffix(True, "bar", "foo", 1.0) # E: Argument 4 to "with_prefix_suffix" has incompatible type "float"; expected "int" +with_prefix_suffix(True, "bar") # E: Too few arguments for "with_prefix_suffix" + +t = (True, "bar", "foo", 5) +reveal_type(with_prefix_suffix(*t)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, builtins.str, builtins.int]" +reveal_type(with_prefix_suffix(True, *("bar", "foo"), 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Literal['foo']?, builtins.int]" + +# TODO: handle list case +#reveal_type(with_prefix_suffix(True, "bar", *["foo1", "foo2"], 5)) + +bad_t = (True, "bar") +with_prefix_suffix(*bad_t) # E: Too few arguments for "with_prefix_suffix" + +def foo(*args: Unpack[Ts]) -> None: + reveal_type(with_prefix_suffix(True, "bar", *args, 5)) # N: Revealed type is "Tuple[builtins.bool, builtins.str, Unpack[Ts`-1], builtins.int]" + + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsFixedLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, str]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.int, builtins.str]" + +foo(0, "foo") +foo(0, 1) # E: Argument 2 to "foo" has incompatible type "int"; expected "Unpack[Tuple[int, str]]" +foo("foo", "bar") # E: Argument 1 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, str]]" +foo(0, "foo", 1) # E: Invalid number of arguments +foo(0) # E: Invalid number of arguments +foo() # E: Invalid number of arguments +foo(*(0, "foo")) + +# TODO: fix this case to do something sensible. 
+#def foo2(*args: Unpack[Tuple[bool, Unpack[Tuple[int, str]], bool]]) -> None: +# reveal_type(args) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646TypeVarStarArgsVariableLengthTuple] +from typing import Tuple +from typing_extensions import Unpack + +def foo(*args: Unpack[Tuple[int, ...]]) -> None: + reveal_type(args) # N: Revealed type is "builtins.tuple[builtins.int, ...]" + +foo(0, 1, 2) +# TODO: this should say 'expected "int"' rather than the unpack +foo(0, 1, "bar") # E: Argument 3 to "foo" has incompatible type "str"; expected "Unpack[Tuple[int, ...]]" + + +def foo2(*args: Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]) -> None: + reveal_type(args) # N: Revealed type is "Tuple[builtins.str, Unpack[builtins.tuple[builtins.int, ...]], builtins.bool, builtins.bool]" + # TODO: generate an error + # reveal_type(args[1]) + +foo2("bar", 1, 2, 3, False, True) +foo2(0, 1, 2, 3, False, True) # E: Argument 1 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", "bar", 2, 3, False, True) # E: Argument 2 to "foo2" has incompatible type "str"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2("bar", 1, 2, 3, 4, True) # E: Argument 5 to "foo2" has incompatible type "int"; expected "Unpack[Tuple[str, Unpack[Tuple[int, ...]], bool, bool]]" +foo2(*("bar", 1, 2, 3, False, True)) +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646Callable] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call( + target: Callable[[Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func(arg1: int, arg2: str) -> None: ... +def func2(arg1: int, arg2: int) -> None: ... +def func3(*args: int) -> None: ... + +vargs: Tuple[int, ...] +vargs_str: Tuple[str, ...] + +call(target=func, args=(0, 'foo')) +call(target=func, args=('bar', 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[object, str], None]" +call(target=func, args=(True, 'foo', 0)) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=(0, 0, 'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" + +# NOTE: This behavior may be a bit contentious, it is maybe inconsistent with our handling of +# PEP646 but consistent with our handling of callable constraints. 
+call(target=func2, args=vargs) # E: Argument "target" to "call" has incompatible type "Callable[[int, int], None]"; expected "Callable[[VarArg(int)], None]" +call(target=func3, args=vargs) +call(target=func3, args=(0,1)) +call(target=func3, args=(0,'foo')) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +call(target=func3, args=vargs_str) # E: Argument "target" to "call" has incompatible type "Callable[[VarArg(int)], None]"; expected "Callable[[VarArg(object)], None]" +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableWithPrefixSuffix] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call_prefix( + target: Callable[[bytes, Unpack[Ts]], None], + args: Tuple[Unpack[Ts]], +) -> None: + pass + +def func_prefix(arg0: bytes, arg1: int, arg2: str) -> None: ... +def func2_prefix(arg0: str, arg1: int, arg2: str) -> None: ... + +call_prefix(target=func_prefix, args=(0, 'foo')) +call_prefix(target=func2_prefix, args=(0, 'foo')) # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]" +[builtins fixtures/tuple.pyi] + diff --git a/test-data/unit/check-typevar-unbound.test b/test-data/unit/check-typevar-unbound.test index a233a9c7af13..d3e54c75e373 100644 --- a/test-data/unit/check-typevar-unbound.test +++ b/test-data/unit/check-typevar-unbound.test @@ -1,14 +1,23 @@ - [case testUnboundTypeVar] from typing import TypeVar T = TypeVar('T') -def f() -> T: # E: A function returning TypeVar should receive at least one argument containing the same Typevar +def f() -> T: # E: A function returning TypeVar should receive at least one argument containing the same TypeVar ... - f() +U = TypeVar('U', bound=int) + +def g() -> U: # E: A function returning TypeVar should receive at least one argument containing the same TypeVar \ + # N: Consider using the upper bound "int" instead + ... + +V = TypeVar('V', int, str) + +# TODO: this should also give an error +def h() -> V: + ... [case testInnerFunctionTypeVar] @@ -21,7 +30,6 @@ def g(a: T) -> T: ... return f() - [case testUnboundIterableOfTypeVars] from typing import Iterable, TypeVar @@ -29,7 +37,6 @@ T = TypeVar('T') def f() -> Iterable[T]: ... - f() [case testBoundTypeVar] @@ -40,7 +47,6 @@ T = TypeVar('T') def f(a: T, b: T, c: int) -> T: ... - [case testNestedBoundTypeVar] from typing import Callable, List, Union, Tuple, TypeVar @@ -58,3 +64,9 @@ def h(a: List[Union[Callable[..., T]]]) -> T: def j(a: List[Union[Callable[..., Tuple[T, T]], int]]) -> T: ... [builtins fixtures/tuple.pyi] + +[case testUnboundedTypevarUnpacking] +from typing import TypeVar +T = TypeVar("T") +def f(t: T) -> None: + a, *b = t # E: "object" object is not iterable diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index d5a94f96fae7..a4a4d68bd9fe 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -702,3 +702,12 @@ class Indexable: [builtins fixtures/tuple.pyi] [builtins fixtures/classmethod.pyi] + +[case testTypeVarWithValueDeferral] +from typing import TypeVar, Callable + +T = TypeVar("T", "A", "B") +Func = Callable[[], T] + +class A: ... +class B: ... 
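
The `check-typevar-tuple.test` additions above exercise PEP 646 variadic generics (`TypeVarTuple` and `Unpack`). As a quick orientation for reviewers, here is a minimal, self-contained sketch of the pattern those cases check. It is illustrative only and not part of the patch; the names (`prefix_tuple`, `Ts`) simply mirror the `testTypeVarTuplePep646TypeVarConcatenation` fixture above.

```python
# Minimal PEP 646 sketch (illustration only, not part of this patch).
from __future__ import annotations

from typing import Tuple, TypeVar
from typing_extensions import TypeVarTuple, Unpack

T = TypeVar("T")
Ts = TypeVarTuple("Ts")  # stands for an arbitrary-length run of element types

def prefix_tuple(head: T, tail: Tuple[Unpack[Ts]]) -> Tuple[T, Unpack[Ts]]:
    # The element types of `tail` are carried over exactly into the result type.
    return (head, *tail)

z = prefix_tuple(0, (True, "a"))
# mypy infers: Tuple[int, bool, str]
```
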
diff --git a/test-data/unit/check-unions.test b/test-data/unit/check-unions.test index e772b489a6d2..65d5c1abc7e8 100644 --- a/test-data/unit/check-unions.test +++ b/test-data/unit/check-unions.test @@ -193,11 +193,19 @@ elif foo(): elif foo(): def f(x: Union[int, str, int, int, str]) -> None: pass elif foo(): - def f(x: Union[int, str, float]) -> None: pass # E: All conditional function variants must have identical signatures + def f(x: Union[int, str, float]) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: Union[int, str]) -> None \ + # N: Redefinition: \ + # N: def f(x: Union[int, str, float]) -> None elif foo(): def f(x: Union[S, T]) -> None: pass elif foo(): - def f(x: Union[str]) -> None: pass # E: All conditional function variants must have identical signatures + def f(x: Union[str]) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def f(x: Union[int, str]) -> None \ + # N: Redefinition: \ + # N: def f(x: str) -> None else: def f(x: Union[Union[int, T], Union[S, T], str]) -> None: pass @@ -206,7 +214,11 @@ else: if foo(): def g(x: Union[int, str, bytes]) -> None: pass else: - def g(x: Union[int, str]) -> None: pass # E: All conditional function variants must have identical signatures + def g(x: Union[int, str]) -> None: pass # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def g(x: Union[int, str, bytes]) -> None \ + # N: Redefinition: \ + # N: def g(x: Union[int, str]) -> None [case testUnionSimplificationSpecialCases] from typing import Any, TypeVar, Union @@ -343,12 +355,12 @@ def foo(a: Union[A, B, C]): from typing import TypeVar, Union T = TypeVar('T') S = TypeVar('S') -def u(x: T, y: S) -> Union[S, T]: pass +def u(x: T, y: S) -> Union[T, S]: pass -reveal_type(u(1, 2.3)) # N: Revealed type is "builtins.float" -reveal_type(u(2.3, 1)) # N: Revealed type is "builtins.float" -reveal_type(u(False, 2.2)) # N: Revealed type is "builtins.float" -reveal_type(u(2.2, False)) # N: Revealed type is "builtins.float" +reveal_type(u(1, 2.3)) # N: Revealed type is "Union[builtins.int, builtins.float]" +reveal_type(u(2.3, 1)) # N: Revealed type is "Union[builtins.float, builtins.int]" +reveal_type(u(False, 2.2)) # N: Revealed type is "Union[builtins.bool, builtins.float]" +reveal_type(u(2.2, False)) # N: Revealed type is "Union[builtins.float, builtins.bool]" [builtins fixtures/primitives.pyi] [case testSimplifyingUnionWithTypeTypes1] @@ -479,7 +491,7 @@ class E: [case testUnionSimplificationWithBoolIntAndFloat] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float]] \ - # N: Revealed type is "builtins.list[builtins.float]" + # N: Revealed type is "builtins.list[Union[builtins.int, builtins.float]]" reveal_type(l) \ # N: Revealed type is "builtins.list[Union[builtins.bool, builtins.int, builtins.float]]" [builtins fixtures/list.pyi] @@ -487,7 +499,7 @@ reveal_type(l) \ [case testUnionSimplificationWithBoolIntAndFloat2] from typing import List, Union l = reveal_type([]) # type: List[Union[bool, int, float, str]] \ - # N: Revealed type is "builtins.list[Union[builtins.float, builtins.str]]" + # N: Revealed type is "builtins.list[Union[builtins.int, builtins.float, builtins.str]]" reveal_type(l) \ # N: Revealed type is "builtins.list[Union[builtins.bool, builtins.int, builtins.float, builtins.str]]" [builtins fixtures/list.pyi] @@ -533,7 +545,7 @@ from typing import Union, Tuple, Any 
a: Union[Tuple[int], Tuple[float]] (a1,) = a -reveal_type(a1) # N: Revealed type is "builtins.float" +reveal_type(a1) # N: Revealed type is "Union[builtins.int, builtins.float]" b: Union[Tuple[int], Tuple[str]] (b1,) = b @@ -546,7 +558,7 @@ from typing import Union, Tuple c: Union[Tuple[int, int], Tuple[int, float]] (c1, c2) = c reveal_type(c1) # N: Revealed type is "builtins.int" -reveal_type(c2) # N: Revealed type is "builtins.float" +reveal_type(c2) # N: Revealed type is "Union[builtins.int, builtins.float]" [builtins fixtures/tuple.pyi] [case testUnionMultiassignGeneric] @@ -613,7 +625,7 @@ b: Union[Tuple[float, int], Tuple[int, int]] b1: object b2: int (b1, b2) = b -reveal_type(b1) # N: Revealed type is "builtins.float" +reveal_type(b1) # N: Revealed type is "Union[builtins.float, builtins.int]" reveal_type(b2) # N: Revealed type is "builtins.int" c: Union[Tuple[int, int], Tuple[int, int]] @@ -627,7 +639,7 @@ d: Union[Tuple[int, int], Tuple[int, float]] d1: object (d1, d2) = d reveal_type(d1) # N: Revealed type is "builtins.int" -reveal_type(d2) # N: Revealed type is "builtins.float" +reveal_type(d2) # N: Revealed type is "Union[builtins.int, builtins.float]" [builtins fixtures/tuple.pyi] [case testUnionMultiassignIndexed] @@ -959,14 +971,14 @@ if x: [builtins fixtures/dict.pyi] [out] -[case testUnpackUnionNoCrashOnPartialNoneList] +[case testUnpackUnionNoCrashOnPartialList] # flags: --strict-optional from typing import Dict, Tuple, List, Any a: Any d: Dict[str, Tuple[List[Tuple[str, str]], str]] -x, _ = d.get(a, ([], [])) -reveal_type(x) # N: Revealed type is "Union[builtins.list[Tuple[builtins.str, builtins.str]], builtins.list[]]" +x, _ = d.get(a, ([], "")) +reveal_type(x) # N: Revealed type is "builtins.list[Tuple[builtins.str, builtins.str]]" for y in x: pass [builtins fixtures/dict.pyi] @@ -992,7 +1004,7 @@ def takes_int(arg: int) -> None: pass takes_int(x) # E: Argument 1 to "takes_int" has incompatible type "Union[ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[int], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[object], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[float], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[str], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[Any], ExtremelyLongTypeNameWhichIsGenericSoWeCanUseItMultipleTimes[bytes]]"; expected "int" [case testRecursiveForwardReferenceInUnion] - +# flags: --disable-recursive-aliases from typing import List, Union MYTYPE = List[Union[str, "MYTYPE"]] # E: Cannot resolve name "MYTYPE" (possible cyclic definition) [builtins fixtures/list.pyi] @@ -1159,6 +1171,25 @@ def foo( foo([1]) [builtins fixtures/list.pyi] +[case testGenericUnionMemberWithTypeVarConstraints] + +from typing import Generic, TypeVar, Union + +T = TypeVar('T', str, int) + +class C(Generic[T]): ... + +def f(s: Union[T, C[T]]) -> T: ... 
+ +ci: C[int] +cs: C[str] + +reveal_type(f(1)) # N: Revealed type is "builtins.int" +reveal_type(f('')) # N: Revealed type is "builtins.str" +reveal_type(f(ci)) # N: Revealed type is "builtins.int" +reveal_type(f(cs)) # N: Revealed type is "builtins.str" + + [case testNestedInstanceTypeAliasUnsimplifiedUnion] from typing import TypeVar, Union, Iterator, List, Any T = TypeVar("T") @@ -1171,3 +1202,20 @@ def foo( yield i foo([1]) [builtins fixtures/list.pyi] + +[case testUnionIterableContainer] +from typing import Iterable, Container, Union + +i: Iterable[str] +c: Container[str] +u: Union[Iterable[str], Container[str]] +ni: Union[Iterable[str], int] +nc: Union[Container[str], int] + +'x' in i +'x' in c +'x' in u +'x' in ni # E: Unsupported right operand type for in ("Union[Iterable[str], int]") +'x' in nc # E: Unsupported right operand type for in ("Union[Container[str], int]") +[builtins fixtures/tuple.pyi] +[typing fixtures/typing-full.pyi] diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 289d042d8790..48459dd8941a 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -242,7 +242,11 @@ import sys if sys.version_info >= (3, 5, 0): def foo() -> int: return 0 else: - def foo() -> str: return '' # E: All conditional function variants must have identical signatures + def foo() -> str: return '' # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def foo() -> int \ + # N: Redefinition: \ + # N: def foo() -> str [builtins fixtures/ops.pyi] [out] @@ -253,7 +257,11 @@ import sys if sys.version_info[1:] >= (5, 0): def foo() -> int: return 0 else: - def foo() -> str: return '' # E: All conditional function variants must have identical signatures + def foo() -> str: return '' # E: All conditional function variants must have identical signatures \ + # N: Original: \ + # N: def foo() -> int \ + # N: Redefinition: \ + # N: def foo() -> str [builtins fixtures/ops.pyi] [out] @@ -928,7 +936,8 @@ class Case1: return False and self.missing() # E: Right operand of "and" is never evaluated def test2(self) -> bool: - return not self.property_decorator_missing and self.missing() # E: Right operand of "and" is never evaluated + return not self.property_decorator_missing and self.missing() # E: Function "Callable[[], bool]" could always be true in boolean context \ + # E: Right operand of "and" is never evaluated def property_decorator_missing(self) -> bool: return True @@ -1397,3 +1406,20 @@ a or a # E: Right operand of "or" is never evaluated 1 and a and 1 # E: Right operand of "and" is never evaluated a and a # E: Right operand of "and" is never evaluated [builtins fixtures/exception.pyi] + +[case testUnreachableFlagWithTerminalBranchInDeferredNode] +# flags: --warn-unreachable +from typing import NoReturn + +def assert_never(x: NoReturn) -> NoReturn: ... 
+ +def force_forward_ref() -> int: + return 4 + +def f(value: None) -> None: + x + if value is not None: + assert_never(value) + +x = force_forward_ref() +[builtins fixtures/exception.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index 4dc10c9f7489..d598fe13b7e9 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -38,6 +38,13 @@ def test(*t: type) -> None: [case testCallingVarArgsFunction] +def f( *a: 'A') -> None: pass + +def g() -> None: pass + +class A: pass +class B(A): pass +class C: pass a = None # type: A b = None # type: B @@ -51,17 +58,14 @@ f() f(a) f(b) f(a, b, a, b) +[builtins fixtures/list.pyi] -def f( *a: 'A') -> None: pass - -def g() -> None: pass +[case testCallingVarArgsFunctionWithAlsoNormalArgs] +def f(a: 'C', *b: 'A') -> None: pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithAlsoNormalArgs] a = None # type: A b = None # type: B @@ -73,15 +77,16 @@ f(c, a, b, c) # E: Argument 4 to "f" has incompatible type "C"; expected "A" f(c) f(c, a) f(c, b, b, a, b) +[builtins fixtures/list.pyi] -def f(a: 'C', *b: 'A') -> None: pass +[case testCallingVarArgsFunctionWithDefaultArgs] +# flags: --implicit-optional --no-strict-optional +def f(a: 'C' = None, *b: 'A') -> None: + pass class A: pass class B(A): pass class C: pass -[builtins fixtures/list.pyi] - -[case testCallingVarArgsFunctionWithDefaultArgs] a = None # type: A b = None # type: B @@ -94,13 +99,6 @@ f() f(c) f(c, a) f(c, b, b, a, b) - -def f(a: 'C' = None, *b: 'A') -> None: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testCallVarargsFunctionWithIterable] @@ -155,6 +153,14 @@ f(*it1, '') # E: Argument 2 to "f" has incompatible type "str"; expected "int" [case testTypeInferenceWithCalleeVarArgs] from typing import TypeVar T = TypeVar('T') + +def f( *a: T) -> T: + pass + +class A: pass +class B(A): pass +class C: pass + a = None # type: A b = None # type: B c = None # type: C @@ -179,13 +185,6 @@ if int(): o = f(a, b, o) if int(): c = f(c) - -def f( *a: T) -> T: - pass - -class A: pass -class B(A): pass -class C: pass [builtins fixtures/list.pyi] [case testTypeInferenceWithCalleeVarArgsAndDefaultArgs] @@ -194,6 +193,11 @@ T = TypeVar('T') a = None # type: A o = None # type: object +def f(a: T, b: T = None, *c: T) -> T: + pass + +class A: pass + if int(): a = f(o) # E: Incompatible types in assignment (expression has type "object", variable has type "A") if int(): @@ -209,11 +213,6 @@ if int(): a = f(a, a) if int(): a = f(a, a, a) - -def f(a: T, b: T = None, *c: T) -> T: - pass - -class A: pass [builtins fixtures/list.pyi] @@ -223,27 +222,31 @@ class A: pass [case testCallingWithListVarArgs] from typing import List, Any, cast + +def f(a: 'A', b: 'B') -> None: + pass + +class A: pass +class B: pass + aa = None # type: List[A] ab = None # type: List[B] a = None # type: A b = None # type: B -f(*aa) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" f(a, *ab) # Ok f(a, b) (cast(Any, f))(*aa) # IDEA: Move to check-dynamic? (cast(Any, f))(a, *ab) # IDEA: Move to check-dynamic? 
- -def f(a: 'A', b: 'B') -> None: - pass +[builtins fixtures/list.pyi] +[case testCallingWithTupleVarArgs] +def f(a: 'A', b: 'B', c: 'C') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:7: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" - -[case testCallingWithTupleVarArgs] +class C: pass +class CC(C): pass a = None # type: A b = None # type: B @@ -261,27 +264,20 @@ f(*(a, b, c)) f(a, *(b, c)) f(a, b, *(c,)) f(a, *(b, cc)) - -def f(a: 'A', b: 'B', c: 'C') -> None: pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/tuple.pyi] [case testInvalidVarArg] +def f(a: 'A') -> None: + pass + +class A: pass + a = None # type: A f(*None) f(*a) # E: List or tuple expected as variadic arguments f(*(a,)) - -def f(a: 'A') -> None: - pass - -class A: pass [builtins fixtures/tuple.pyi] @@ -291,34 +287,33 @@ class A: pass [case testCallingVarArgsFunctionWithListVarArgs] from typing import List + +def f(a: 'A', *b: 'B') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass +class B: pass + aa, ab, a, b = None, None, None, None # type: (List[A], List[B], A, B) -f(*aa) # Fail -f(a, *aa) # Fail -f(b, *ab) # Fail -f(a, a, *ab) # Fail -f(a, b, *aa) # Fail -f(b, b, *ab) # Fail -g(*ab) # Fail +f(*aa) # E: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" +f(a, *aa) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" +f(b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +f(a, a, *ab) # E: Argument 2 to "f" has incompatible type "A"; expected "B" +f(a, b, *aa) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" +f(b, b, *ab) # E: Argument 1 to "f" has incompatible type "B"; expected "A" +g(*ab) # E: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" f(a, *ab) f(a, b, *ab) f(a, b, b, *ab) g(*aa) +[builtins fixtures/list.pyi] +[case testCallingVarArgsFunctionWithTupleVarArgs] +def f(a: 'A', *b: 'B') -> None: + pass -def f(a: 'A', *b: 'B') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass class A: pass class B: pass -[builtins fixtures/list.pyi] -[out] -main:3: error: Argument 1 to "f" has incompatible type "*List[A]"; expected "B" -main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" -main:5: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:6: error: Argument 2 to "f" has incompatible type "A"; expected "B" -main:7: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" -main:8: error: Argument 1 to "f" has incompatible type "B"; expected "A" -main:9: error: Argument 1 to "g" has incompatible type "*List[B]"; expected "A" - -[case testCallingVarArgsFunctionWithTupleVarArgs] +class C: pass +class CC(C): pass a, b, c, cc = None, None, None, None # type: (A, B, C, CC) @@ -334,14 +329,6 @@ f(*()) # E: Too few arguments for "f" f(*(a, b, b)) f(a, *(b, b)) f(a, b, *(b,)) - -def f(a: 'A', *b: 'B') -> None: - pass - -class A: pass -class B: pass -class C: pass -class CC(C): pass [builtins fixtures/list.pyi] @@ -351,24 +338,30 @@ class CC(C): pass [case testDynamicVarArg] from typing import Any +def f(a: 'A') -> None: pass +def g(a: 'A', *b: 'A') -> None: pass +class A: pass + d, a = None, None # type: (Any, A) -f(a, a, *d) # Fail +f(a, a, *d) # E: Too many arguments for "f" f(a, *d) # Ok f(*d) # Ok g(*d) g(a, *d) g(a, a, *d) - -def f(a: 'A') -> None: pass -def g(a: 'A', *b: 'A') -> None: pass -class A: pass [builtins fixtures/list.pyi] -[out] -main:3: error: Too many 
arguments for "f" - [case testListVarArgsAndSubtyping] from typing import List +def f( *a: 'A') -> None: + pass + +def g( *a: 'B') -> None: + pass + +class A: pass +class B(A): pass + aa = None # type: List[A] ab = None # type: List[B] @@ -376,43 +369,30 @@ g(*aa) # E: Argument 1 to "g" has incompatible type "*List[A]"; expected "B" f(*aa) f(*ab) g(*ab) +[builtins fixtures/list.pyi] -def f( *a: 'A') -> None: - pass +[case testCallerVarArgsAndDefaultArgs] +# flags: --implicit-optional --no-strict-optional -def g( *a: 'B') -> None: +def f(a: 'A', b: 'B' = None, *c: 'B') -> None: pass class A: pass -class B(A): pass -[builtins fixtures/list.pyi] - -[case testCallerVarArgsAndDefaultArgs] +class B: pass a, b = None, None # type: (A, B) -f(*()) # Fail -f(a, *[a]) # Fail -f(a, b, *[a]) # Fail -f(*(a, a, b)) # Fail +f(*()) # E: Too few arguments for "f" +f(a, *[a]) # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" \ + # E: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" +f(a, b, *[a]) # E: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" +f(*(a, a, b)) # E: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "Optional[B]" f(*(a,)) f(*(a, b)) f(*(a, b, b, b)) f(a, *[]) f(a, *[b]) f(a, *[b, b]) - -def f(a: 'A', b: 'B' = None, *c: 'B') -> None: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] -[out] -main:3: error: Too few arguments for "f" -main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "Optional[B]" -main:4: error: Argument 2 to "f" has incompatible type "*List[A]"; expected "B" -main:5: error: Argument 3 to "f" has incompatible type "*List[A]"; expected "B" -main:6: error: Argument 1 to "f" has incompatible type "*Tuple[A, A, B]"; expected "Optional[B]" [case testVarArgsAfterKeywordArgInCall1] # see: mypy issue #2729 @@ -531,6 +511,13 @@ def f(a: B, *b: B) -> B: pass from typing import List, TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, *b: T) -> Tuple[S, T]: + pass + +class A: pass +class B: pass + a, b, aa = None, None, None # type: (A, B, List[A]) if int(): @@ -554,18 +541,18 @@ if int(): b, a = f(b, *aa) if int(): b, a = f(b, a, *aa) - -def f(a: S, *b: T) -> Tuple[S, T]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarArgsTupleWithTypeInference] from typing import TypeVar, Tuple S = TypeVar('S') T = TypeVar('T') + +def f(a: S, b: T) -> Tuple[S, T]: pass + +class A: pass +class B: pass + a, b = None, None # type: (A, B) if int(): @@ -582,11 +569,6 @@ if int(): a, b = f(*(a, b)) if int(): a, b = f(a, *(b,)) - -def f(a: S, b: T) -> Tuple[S, T]: pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] [case testCallerVarargsAndComplexTypeInference] @@ -598,6 +580,13 @@ ao = None # type: List[object] aa = None # type: List[A] ab = None # type: List[B] +class G(Generic[T]): + def f(self, *a: S) -> Tuple[List[S], List[T]]: + pass + +class A: pass +class B: pass + if int(): a, aa = G().f(*[a]) \ # E: Incompatible types in assignment (expression has type "List[A]", variable has type "A") \ @@ -624,13 +613,6 @@ if int(): # E: Incompatible types in assignment (expression has type "List[]", variable has type "List[A]") \ # N: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance \ # N: Consider using "Sequence" instead, which is covariant - -class G(Generic[T]): - def f(self, *a: S) -> Tuple[List[S], List[T]]: - pass - -class A: pass -class B: pass [builtins fixtures/list.pyi] 
[case testCallerTupleVarArgsAndGenericCalleeVarArg] @@ -760,3 +742,327 @@ bar(*good3) bar(*bad1) # E: Argument 1 to "bar" has incompatible type "*I[str]"; expected "float" bar(*bad2) # E: List or tuple expected as variadic arguments [builtins fixtures/dict.pyi] + +-- Keyword arguments unpacking + +[case testUnpackKwargsReveal] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int +def foo(arg: bool, **kwargs: Unpack[Person]) -> None: ... + +reveal_type(foo) # N: Revealed type is "def (arg: builtins.bool, **kwargs: Unpack[TypedDict('__main__.Person', {'name': builtins.str, 'age': builtins.int})])" +[builtins fixtures/dict.pyi] + +[case testUnpackOutsideOfKwargs] +from typing_extensions import Unpack, TypedDict +class Person(TypedDict): + name: str + age: int + +def foo(x: Unpack[Person]) -> None: # E: TypedDict('__main__.Person', {'name': builtins.str, 'age': builtins.int}) cannot be unpacked (must be tuple or TypeVarTuple) + ... +def bar(x: int, *args: Unpack[Person]) -> None: # E: TypedDict('__main__.Person', {'name': builtins.str, 'age': builtins.int}) cannot be unpacked (must be tuple or TypeVarTuple) + ... +def baz(**kwargs: Unpack[Person]) -> None: # OK + ... +[builtins fixtures/dict.pyi] + +[case testUnpackWithoutTypedDict] +from typing_extensions import Unpack + +def foo(**kwargs: Unpack[dict]) -> None: # E: Unpack item in ** argument must be a TypedDict + ... +[builtins fixtures/dict.pyi] + +[case testUnpackWithDuplicateKeywords] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int +def foo(name: str, **kwargs: Unpack[Person]) -> None: # E: Overlap between argument names and ** TypedDict items: "name" + ... +[builtins fixtures/dict.pyi] + +[case testUnpackWithDuplicateKeywordKwargs] +from typing_extensions import Unpack, TypedDict +from typing import Dict, List + +class Spec(TypedDict): + args: List[int] + kwargs: Dict[int, int] +def foo(**kwargs: Unpack[Spec]) -> None: # Allowed + ... +foo(args=[1], kwargs={"2": 3}) # E: Dict entry 0 has incompatible type "str": "int"; expected "int": "int" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsNonIdentifier] +from typing_extensions import Unpack, TypedDict + +Weird = TypedDict("Weird", {"@": int}) + +def foo(**kwargs: Unpack[Weird]) -> None: + reveal_type(kwargs["@"]) # N: Revealed type is "builtins.int" +foo(**{"@": 42}) +foo(**{"no": "way"}) # E: Argument 1 to "foo" has incompatible type "**Dict[str, str]"; expected "int" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsEmpty] +from typing_extensions import Unpack, TypedDict + +Empty = TypedDict("Empty", {}) + +def foo(**kwargs: Unpack[Empty]) -> None: # N: "foo" defined here + reveal_type(kwargs) # N: Revealed type is "TypedDict('__main__.Empty', {})" +foo() +foo(x=1) # E: Unexpected keyword argument "x" for "foo" +[builtins fixtures/dict.pyi] + +[case testUnpackTypedDictTotality] +from typing_extensions import Unpack, TypedDict + +class Circle(TypedDict, total=True): + radius: int + color: str + x: int + y: int + +def foo(**kwargs: Unpack[Circle]): + ... +foo(x=0, y=0, color='orange') # E: Missing named argument "radius" for "foo" + +class Square(TypedDict, total=False): + side: int + color: str + +def bar(**kwargs: Unpack[Square]): + ... 
+bar(side=12) +[builtins fixtures/dict.pyi] + +[case testUnpackUnexpectedKeyword] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict, total=False): + name: str + age: int + +def foo(**kwargs: Unpack[Person]) -> None: # N: "foo" defined here + ... +foo(name='John', age=42, department='Sales') # E: Unexpected keyword argument "department" for "foo" +foo(name='Jennifer', age=38) +[builtins fixtures/dict.pyi] + +[case testUnpackKeywordTypes] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +def foo(**kwargs: Unpack[Person]): + ... +foo(name='John', age='42') # E: Argument "age" to "foo" has incompatible type "str"; expected "int" +foo(name='Jennifer', age=38) +[builtins fixtures/dict.pyi] + +[case testUnpackKeywordTypesTypedDict] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +class LegacyPerson(TypedDict): + name: str + age: str + +def foo(**kwargs: Unpack[Person]) -> None: + ... +lp = LegacyPerson(name="test", age="42") +foo(**lp) # E: Argument "age" to "foo" has incompatible type "str"; expected "int" +[builtins fixtures/dict.pyi] + +[case testFunctionBodyWithUnpackedKwargs] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +def foo(**kwargs: Unpack[Person]) -> int: + name: str = kwargs['name'] + age: str = kwargs['age'] # E: Incompatible types in assignment (expression has type "int", variable has type "str") + department: str = kwargs['department'] # E: TypedDict "Person" has no key "department" + return kwargs['age'] +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsOverrides] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +class Base: + def foo(self, **kwargs: Unpack[Person]) -> None: ... +class SubGood(Base): + def foo(self, *, name: str, age: int, extra: bool = False) -> None: ... +class SubBad(Base): + def foo(self, *, name: str, age: str) -> None: ... # E: Argument 2 of "foo" is incompatible with supertype "Base"; supertype defines the argument type as "int" \ + # N: This violates the Liskov substitution principle \ + # N: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsOverridesTypedDict] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +class PersonExtra(Person, total=False): + extra: bool + +class Unrelated(TypedDict): + baz: int + +class Base: + def foo(self, **kwargs: Unpack[Person]) -> None: ... +class SubGood(Base): + def foo(self, **kwargs: Unpack[PersonExtra]) -> None: ... +class SubBad(Base): + def foo(self, **kwargs: Unpack[Unrelated]) -> None: ... # E: Signature of "foo" incompatible with supertype "Base" \ + # N: Superclass: \ + # N: def foo(*, name: str, age: int) -> None \ + # N: Subclass: \ + # N: def foo(self, *, baz: int) -> None +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsGeneric] +from typing import Generic, TypeVar +from typing_extensions import Unpack, TypedDict + +T = TypeVar("T") +class Person(TypedDict, Generic[T]): + name: str + value: T + +def foo(**kwargs: Unpack[Person[T]]) -> T: ... 
+reveal_type(foo(name="test", value=42)) # N: Revealed type is "builtins.int" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsInference] +from typing import Generic, TypeVar, Protocol +from typing_extensions import Unpack, TypedDict + +T_contra = TypeVar("T_contra", contravariant=True) +class CBPerson(Protocol[T_contra]): + def __call__(self, **kwargs: Unpack[Person[T_contra]]) -> None: ... + +T = TypeVar("T") +class Person(TypedDict, Generic[T]): + name: str + value: T + +def test(cb: CBPerson[T]) -> T: ... + +def foo(*, name: str, value: int) -> None: ... +reveal_type(test(foo)) # N: Revealed type is "builtins.int" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsOverload] +from typing import Any, overload +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +class Fruit(TypedDict): + sort: str + taste: int + +@overload +def foo(**kwargs: Unpack[Person]) -> int: ... +@overload +def foo(**kwargs: Unpack[Fruit]) -> str: ... +def foo(**kwargs: Any) -> Any: + ... + +reveal_type(foo(sort="test", taste=999)) # N: Revealed type is "builtins.str" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsJoin] +from typing_extensions import Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +def foo(*, name: str, age: int) -> None: ... +def bar(**kwargs: Unpack[Person]) -> None: ... + +reveal_type([foo, bar]) # N: Revealed type is "builtins.list[def (*, name: builtins.str, age: builtins.int)]" +reveal_type([bar, foo]) # N: Revealed type is "builtins.list[def (*, name: builtins.str, age: builtins.int)]" +[builtins fixtures/dict.pyi] + +[case testUnpackKwargsParamSpec] +from typing import Callable, Any, TypeVar, List +from typing_extensions import ParamSpec, Unpack, TypedDict + +class Person(TypedDict): + name: str + age: int + +P = ParamSpec('P') +T = TypeVar('T') + +def dec(f: Callable[P, T]) -> Callable[P, List[T]]: ... + +@dec +def g(**kwargs: Unpack[Person]) -> int: ... + +reveal_type(g) # N: Revealed type is "def (*, name: builtins.str, age: builtins.int) -> builtins.list[builtins.int]" +[builtins fixtures/dict.pyi] + +[case testUnpackGenericTypedDictImplicitAnyEnabled] +from typing import Generic, TypeVar +from typing_extensions import Unpack, TypedDict + +T = TypeVar("T") +class TD(TypedDict, Generic[T]): + key: str + value: T + +def foo(**kwds: Unpack[TD]) -> None: ... # Same as `TD[Any]` +foo(key="yes", value=42) +foo(key="yes", value="ok") +[builtins fixtures/dict.pyi] + +[case testUnpackGenericTypedDictImplicitAnyDisabled] +# flags: --disallow-any-generics +from typing import Generic, TypeVar +from typing_extensions import Unpack, TypedDict + +T = TypeVar("T") +class TD(TypedDict, Generic[T]): + key: str + value: T + +def foo(**kwds: Unpack[TD]) -> None: ... # E: Missing type parameters for generic type "TD" +foo(key="yes", value=42) +foo(key="yes", value="ok") +[builtins fixtures/dict.pyi] + +[case testUnpackNoCrashOnEmpty] +from typing_extensions import Unpack + +class C: + def __init__(self, **kwds: Unpack) -> None: ... # E: Unpack[...] requires exactly one type argument +class D: + def __init__(self, **kwds: Unpack[int, str]) -> None: ... # E: Unpack[...] 
requires exactly one type argument +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/cmdline.test b/test-data/unit/cmdline.test index 97a9dcaa7410..c2e98cdb74f9 100644 --- a/test-data/unit/cmdline.test +++ b/test-data/unit/cmdline.test @@ -130,17 +130,13 @@ two/mod/__init__.py: note: See https://mypy.readthedocs.io/en/stable/running_myp two/mod/__init__.py: note: Common resolutions include: a) using `--exclude` to avoid checking one of them, b) adding `__init__.py` somewhere, c) using `--explicit-package-bases` or adjusting MYPYPATH == Return code: 2 -[case testFlagsFile-skip] +-- Note that we use `----`, because this is how `--` is escaped while `--` is a comment starter. +[case testFlagsFile] # cmd: mypy @flagsfile [file flagsfile] ---always-true=FLAG +----always-true=FLAG main.py [file main.py] -# TODO: this test case passes if you try the exact same thing -# outside of the test suite. what's going on? it's not related -# to the extra flags that testcmdline adds. and things work -# in the test suite with py2 (perhaps because it's a -# special option) x: int FLAG = False if not FLAG: @@ -429,6 +425,7 @@ follow_imports = skip [out] main.py:2: note: Revealed type is "Any" main.py:4: note: Revealed type is "Any" +== Return code: 0 [case testConfigFollowImportsError] # cmd: mypy main.py @@ -521,7 +518,7 @@ reveal_type(missing.x) # Expect Any ignore_missing_imports = True [out] main.py:2: note: Revealed type is "Any" - +== Return code: 0 [case testFailedImportOnWrongCWD] # cmd: mypy main.py @@ -658,15 +655,26 @@ python_version = 3.6 [file int_pow.py] a = 1 b = a + 2 -reveal_type(a**0) # N: Revealed type is "Literal[1]" -reveal_type(a**1) # N: Revealed type is "builtins.int" -reveal_type(a**2) # N: Revealed type is "builtins.int" -reveal_type(a**-0) # N: Revealed type is "Literal[1]" -reveal_type(a**-1) # N: Revealed type is "builtins.float" -reveal_type(a**(-2)) # N: Revealed type is "builtins.float" -reveal_type(a**b) # N: Revealed type is "Any" -reveal_type(a.__pow__(2)) # N: Revealed type is "builtins.int" -reveal_type(a.__pow__(a)) # N: Revealed type is "Any" +reveal_type(a**0) +reveal_type(a**1) +reveal_type(a**2) +reveal_type(a**-0) +reveal_type(a**-1) +reveal_type(a**(-2)) +reveal_type(a**b) +reveal_type(a.__pow__(2)) +reveal_type(a.__pow__(a)) +[out] +int_pow.py:3: note: Revealed type is "Literal[1]" +int_pow.py:4: note: Revealed type is "builtins.int" +int_pow.py:5: note: Revealed type is "builtins.int" +int_pow.py:6: note: Revealed type is "Literal[1]" +int_pow.py:7: note: Revealed type is "builtins.float" +int_pow.py:8: note: Revealed type is "builtins.float" +int_pow.py:9: note: Revealed type is "Any" +int_pow.py:10: note: Revealed type is "builtins.int" +int_pow.py:11: note: Revealed type is "Any" +== Return code: 0 [case testDisallowAnyGenericsBuiltinCollections] # cmd: mypy m.py @@ -1423,7 +1431,6 @@ exclude = (?x)( [out] c/cpkg.py:1: error: "int" not callable - [case testCmdlineTimingStats] # cmd: mypy --timing-stats timing.txt . [file b/__init__.py] @@ -1439,6 +1446,9 @@ b\.c \d+ # cmd: mypy --enable-incomplete-features a.py [file a.py] pass +[out] +Warning: --enable-incomplete-features is deprecated, use --enable-incomplete-feature=FEATURE instead +== Return code: 0 [case testShadowTypingModuleEarlyLoad] # cmd: mypy dir @@ -1478,3 +1488,113 @@ note: A user-defined top-level module with name "typing" is not supported [out] Failed to find builtin module mypy_extensions, perhaps typeshed is broken? 
== Return code: 2 + +[case testRecursiveAliasesFlagDeprecated] +# cmd: mypy --enable-recursive-aliases a.py +[file a.py] +pass +[out] +Warning: --enable-recursive-aliases is deprecated; recursive types are enabled by default +== Return code: 0 + +[case testNotesOnlyResultInExitSuccess] +# cmd: mypy a.py +[file a.py] +def f(): + x: int = "no" +[out] +a.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs +== Return code: 0 + +[case testCustomTypeshedDirFilePassedExplicitly] +# cmd: mypy --custom-typeshed-dir dir m.py dir/stdlib/foo.pyi +[file m.py] +1() +[file dir/stdlib/abc.pyi] +1() # Errors are not reported from typeshed by default +[file dir/stdlib/builtins.pyi] +class object: pass +class str(object): pass +class int(object): pass +class list: pass +class dict: pass +[file dir/stdlib/sys.pyi] +[file dir/stdlib/types.pyi] +[file dir/stdlib/typing.pyi] +[file dir/stdlib/mypy_extensions.pyi] +[file dir/stdlib/typing_extensions.pyi] +[file dir/stdlib/foo.pyi] +1() # Errors are reported if the file was explicitly passed on the command line +[file dir/stdlib/VERSIONS] +[out] +dir/stdlib/foo.pyi:1: error: "int" not callable +m.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly1] +# cmd: mypy $CWD/pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly2] +# cmd: mypy pypath/foo.py +[file pypath/foo.py] +1() +[out] +pypath/foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly3] +# cmd: mypy -p foo +# cwd: pypath +[file pypath/foo/__init__.py] +1() +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable +foo/__init__.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly4] +# cmd: mypy -m foo +# cwd: pypath +[file pypath/foo.py] +1() +[out] +foo.py:1: error: "int" not callable + +[case testFileInPythonPathPassedExplicitly5] +# cmd: mypy -m foo.m +# cwd: pypath +[file pypath/foo/__init__.py] +1() # TODO: Maybe this should generate errors as well? But how would we decide? +[file pypath/foo/m.py] +1() +[out] +foo/m.py:1: error: "int" not callable + +[case testCmdlineCfgEnableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +enable_error_code = + truthy-bool, + redundant-expr, +[out] + +[case testCmdlineCfgDisableErrorCodeTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +disable_error_code = + misc, + override, +[out] + +[case testCmdlineCfgAlwaysTrueTrailingComma] +# cmd: mypy . +[file mypy.ini] +\[mypy] +always_true = + MY_VAR, +[out] diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 370413ee774b..7586c8763d33 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -185,7 +185,7 @@ Daemon started $ dmypy check foo.py bar.py $ dmypy recheck $ dmypy recheck --update foo.py --remove bar.py sir_not_appearing_in_this_film.py -foo.py:1: error: Import of "bar" ignored +foo.py:1: error: Import of "bar" ignored [misc] foo.py:1: note: (Using --follow-imports=error, module not passed on command line) == Return code: 1 $ dmypy recheck --update bar.py @@ -214,6 +214,20 @@ mypy-daemon: error: Missing target module, package, files, or command. 
$ dmypy stop Daemon stopped +[case testDaemonWarningSuccessExitCode-posix] +$ dmypy run -- foo.py --follow-imports=error +Daemon started +foo.py:2: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs +Success: no issues found in 1 source file +$ echo $? +0 +$ dmypy stop +Daemon stopped +[file foo.py] +def foo(): + a: int = 1 + print(a + "2") + -- this is carefully constructed to be able to break if the quickstart system lets -- something through incorrectly. in particular, the files need to have the same size [case testDaemonQuickstart] @@ -277,7 +291,7 @@ $ dmypy suggest foo.foo (str) -> int $ {python} -c "import shutil; shutil.copy('foo2.py', 'foo.py')" $ dmypy check foo.py bar.py -bar.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") +bar.py:3: error: Incompatible types in assignment (expression has type "int", variable has type "str") [assignment] == Return code: 1 [file foo.py] def foo(arg): @@ -304,7 +318,7 @@ $ dmypy inspect foo:1:2:3:4 Command "inspect" is only valid after a "check" command (that produces no parse errors) == Return code: 2 $ dmypy check foo.py --export-types -foo.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") +foo.py:3: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] == Return code: 1 $ dmypy inspect foo:1 Format should be file:line:column[:end_line:end_column] @@ -434,12 +448,12 @@ $ dmypy inspect --show attrs bar.py:10:1 --union-attrs [file foo.py] class B: - def b(self) -> int: ... + def b(self) -> int: return 0 a: int class C(B): a: int y: int - def x(self) -> int: ... + def x(self) -> int: return 0 v: C # line 9 if False: @@ -476,7 +490,7 @@ bar/baz.py:4:5:attr $ dmypy inspect foo.py:10:10 --show definition --include-span 10:1:10:12 -> bar/baz.py:6:1:test $ dmypy inspect foo.py:14:6 --show definition --include-span --include-kind -NameExpr:14:5:14:7 -> foo.py:13:1:arg +NameExpr:14:5:14:7 -> foo.py:13:9:arg MemberExpr:14:5:14:9 -> bar/baz.py:9:5:x, bar/baz.py:11:5:x [file foo.py] diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test index 7369ea247e26..66adfaecd909 100644 --- a/test-data/unit/diff.test +++ b/test-data/unit/diff.test @@ -1484,3 +1484,16 @@ C = ParamSpec('C') [out] __main__.B __main__.C + +[case testEmptyBodySuper] +from abc import abstractmethod +class C: + @abstractmethod + def meth(self) -> int: ... 
+[file next.py] +from abc import abstractmethod +class C: + @abstractmethod + def meth(self) -> int: return 0 +[out] +__main__.C.meth diff --git a/test-data/unit/errorstream.test b/test-data/unit/errorstream.test index 8a73748d27ff..46af433f8916 100644 --- a/test-data/unit/errorstream.test +++ b/test-data/unit/errorstream.test @@ -36,14 +36,14 @@ import b def f() -> int: reveal_type(b.x) return b.x -y = 0 + 0 +y = 0 + int() [file b.py] import a def g() -> int: reveal_type(a.y) return a.y 1 / '' -x = 1 + 1 +x = 1 + int() [out] ==== Errors flushed ==== diff --git a/test-data/unit/fine-grained-attr.test b/test-data/unit/fine-grained-attr.test index 0a54f9a6ea59..3fd40b774c7b 100644 --- a/test-data/unit/fine-grained-attr.test +++ b/test-data/unit/fine-grained-attr.test @@ -21,3 +21,62 @@ class A: [out] == main:5: error: Incompatible return value type (got "Attribute[float]", expected "Attribute[int]") + +[case magicAttributeConsistency] +import m + +[file c.py] +from attr import define + +@define +class A: + a: float + b: int +[builtins fixtures/attr.pyi] + +[file m.py] +from c import A + +A.__attrs_attrs__.a + +[file m.py.2] +from c import A + +A.__attrs_attrs__.b + +[out] +== + +[case magicAttributeConsistency2-only_when_cache] +[file c.py] +import attr + +@attr.s +class Entry: + var: int = attr.ib() +[builtins fixtures/attr.pyi] + +[file m.py] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> None: ... +func(Entry(2)) + +[file m.py.2] +from typing import Any, ClassVar, Protocol +from c import Entry + +class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +def func(e: AttrsInstance) -> int: + return 2 # Change return type to force reanalysis + +func(Entry(2)) + +[out] +== diff --git a/test-data/unit/fine-grained-blockers.test b/test-data/unit/fine-grained-blockers.test index f3991c0d31e4..a134fb1d4301 100644 --- a/test-data/unit/fine-grained-blockers.test +++ b/test-data/unit/fine-grained-blockers.test @@ -317,8 +317,8 @@ a.py:1: error: invalid syntax == a.py:1: error: invalid syntax == -b.py:3: error: Too many arguments for "f" a.py:3: error: Too many arguments for "g" +b.py:3: error: Too many arguments for "f" [case testDeleteFileWithBlockingError-only_when_nocache] -- Different cache/no-cache tests because: diff --git a/test-data/unit/fine-grained-dataclass-transform.test b/test-data/unit/fine-grained-dataclass-transform.test new file mode 100644 index 000000000000..7dc852f1d733 --- /dev/null +++ b/test-data/unit/fine-grained-dataclass-transform.test @@ -0,0 +1,92 @@ +[case updateDataclassTransformParameterViaDecorator] +# flags: --python-version 3.11 +from m import my_dataclass + +@my_dataclass +class Foo: + x: int + +foo = Foo(1) +foo.x = 2 + +[file m.py] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=False) +def my_dataclass(cls): return cls + +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +def my_dataclass(cls): return cls + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +== +main:9: error: Property "x" defined in "Foo" is read-only + +[case updateDataclassTransformParameterViaParentClass] +# flags: --python-version 3.11 +from m import Dataclass + +class Foo(Dataclass): + x: int + +foo = Foo(1) +foo.x = 2 + +[file m.py] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=False) +class Dataclass: ... 
+ +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform(frozen_default=True) +class Dataclass: ... + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +== +main:8: error: Property "x" defined in "Foo" is read-only + +[case updateBaseClassToUseDataclassTransform] +# flags: --python-version 3.11 +from m import A + +class B(A): + y: int + +B(x=1, y=2) + +[file m.py] +class Dataclass: ... + +class A(Dataclass): + x: int + +[file m.py.2] +from typing import dataclass_transform + +@dataclass_transform() +class Dataclass: ... + +class A(Dataclass): + x: int + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[out] +main:7: error: Unexpected keyword argument "x" for "B" +builtins.pyi:12: note: "B" defined here +main:7: error: Unexpected keyword argument "y" for "B" +builtins.pyi:12: note: "B" defined here +== diff --git a/test-data/unit/fine-grained-follow-imports.test b/test-data/unit/fine-grained-follow-imports.test index 4eb55fb125f7..22f2a7895cf9 100644 --- a/test-data/unit/fine-grained-follow-imports.test +++ b/test-data/unit/fine-grained-follow-imports.test @@ -587,8 +587,8 @@ def f() -> None: main.py:2: error: Cannot find implementation or library stub for module named "p" main.py:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports == -p/m.py:1: error: "str" not callable p/__init__.py:1: error: "int" not callable +p/m.py:1: error: "str" not callable [case testFollowImportsNormalPackageInitFileStub] # flags: --follow-imports=normal @@ -610,11 +610,11 @@ x x x main.py:1: error: Cannot find implementation or library stub for module named "p" main.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports == -p/m.pyi:1: error: "str" not callable p/__init__.pyi:1: error: "int" not callable -== p/m.pyi:1: error: "str" not callable +== p/__init__.pyi:1: error: "int" not callable +p/m.pyi:1: error: "str" not callable [case testFollowImportsNormalNamespacePackages] # flags: --follow-imports=normal --namespace-packages @@ -638,12 +638,12 @@ main.py:2: error: Cannot find implementation or library stub for module named "p main.py:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports main.py:2: error: Cannot find implementation or library stub for module named "p2" == -p2/m2.py:1: error: "str" not callable p1/m1.py:1: error: "int" not callable +p2/m2.py:1: error: "str" not callable == +p1/m1.py:1: error: "int" not callable main.py:2: error: Cannot find implementation or library stub for module named "p2.m2" main.py:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -p1/m1.py:1: error: "int" not callable [case testFollowImportsNormalNewFileOnCommandLine] # flags: --follow-imports=normal @@ -659,8 +659,8 @@ p1/m1.py:1: error: "int" not callable [out] main.py:1: error: "int" not callable == -x.py:1: error: "str" not callable main.py:1: error: "int" not callable +x.py:1: error: "str" not callable [case testFollowImportsNormalSearchPathUpdate-only_when_nocache] # flags: --follow-imports=normal @@ -678,8 +678,8 @@ import bar [out] == -src/bar.py:1: error: "int" not callable src/foo.py:2: error: "str" not callable +src/bar.py:1: error: "int" not callable [case testFollowImportsNormalSearchPathUpdate2-only_when_cache] # flags: --follow-imports=normal @@ -769,3 +769,80 @@ from . 
import mod3 == main.py:1: error: Cannot find implementation or library stub for module named "pkg" main.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports + +[case testNewImportCycleTypeVarBound] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +import trio +from . import abc as abc + +T = TypeVar("T", bound=trio.abc.A) + +[file trio/abc.py.2] +import trio +class A: ... +[out] +== + +[case testNewImportCycleTupleBase] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar, Tuple +import trio +from . import abc as abc + +class C(Tuple[trio.abc.A, trio.abc.A]): ... + +[file trio/abc.py.2] +import trio +class A: ... +[builtins fixtures/tuple.pyi] +[out] +== + +[case testNewImportCycleTypedDict] +# flags: --follow-imports=normal +# cmd: mypy main.py +# cmd2: mypy other.py + +[file main.py] +# empty + +[file other.py.2] +import trio + +[file trio/__init__.py.2] +from typing import TypeVar +from typing_extensions import TypedDict +import trio +from . import abc as abc + +class C(TypedDict): + x: trio.abc.A + y: trio.abc.A + +[file trio/abc.py.2] +import trio +class A: ... +[builtins fixtures/dict.pyi] +[out] +== diff --git a/test-data/unit/fine-grained-inspect.test b/test-data/unit/fine-grained-inspect.test index 5661c14bc093..2c575ec365b1 100644 --- a/test-data/unit/fine-grained-inspect.test +++ b/test-data/unit/fine-grained-inspect.test @@ -52,8 +52,8 @@ class Meta(type): == {"C": ["meth", "x"]} {"C": ["meth", "x"], "Meta": ["y"], "type": ["__init__"]} -{} -{"object": ["__init__"]} +{"function": ["__name__"]} +{"function": ["__name__"], "object": ["__init__"]} [case testInspectDefBasic] # inspect2: --show=definition foo.py:5:5 @@ -189,7 +189,7 @@ def foo(arg: T) -> T: return arg [out] == -foo.py:7:1:arg +foo.py:7:9:arg foo.py:4:5:x [case testInspectTypeVarValuesDef] @@ -219,7 +219,7 @@ class C(Generic[T]): [out] == foo.py:5:5:z, tmp/foo.py:9:5:z -foo.py:12:1:arg +foo.py:12:9:arg foo.py:5:5:z, tmp/foo.py:9:5:z [case testInspectModuleAttrs] @@ -236,7 +236,7 @@ class C: ... 
[builtins fixtures/module.pyi] [out] == -{"": ["C", "__annotations__", "__doc__", "__file__", "__name__", "__package__", "bar", "x"], "ModuleType": ["__file__"]} +{"": ["C", "__annotations__", "__doc__", "__file__", "__name__", "__package__", "bar", "x"], "ModuleType": ["__file__", "__getattr__"]} [case testInspectModuleDef] # inspect2: --show=definition --include-kind foo.py:2:1 @@ -266,4 +266,4 @@ def foo(arg: int) -> int: [out] == -4:12:4:14 -> tmp/foo.py:1:1:arg +4:12:4:14 -> tmp/foo.py:1:9:arg diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index a756398fed1f..f76ced64341b 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -38,8 +38,8 @@ def f(x: int) -> None: pass == a.py:2: error: Incompatible return value type (got "int", expected "str") == -b.py:2: error: Too many arguments for "f" a.py:2: error: Incompatible return value type (got "int", expected "str") +b.py:2: error: Too many arguments for "f" == [case testAddFileFixesError] @@ -845,7 +845,7 @@ main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str" == main:1: error: Cannot find implementation or library stub for module named "p.a" main:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:1: error: Cannot find implementation or library stub for module named "p" +main:2: error: "object" has no attribute "a" [case testDeletePackage2] import p @@ -2192,17 +2192,55 @@ x = 'x' [case testLibraryStubsNotInstalled] import a [file a.py] -import waitress +import requests [file a.py.2] # nothing [file a.py.3] -import requests +import jack [out] -a.py:1: error: Library stubs not installed for "waitress" (or incompatible with Python 3.7) -a.py:1: note: Hint: "python3 -m pip install types-waitress" +a.py:1: error: Library stubs not installed for "requests" +a.py:1: note: Hint: "python3 -m pip install types-requests" a.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports == == -a.py:1: error: Library stubs not installed for "requests" (or incompatible with Python 3.7) -a.py:1: note: Hint: "python3 -m pip install types-requests" +a.py:1: error: Library stubs not installed for "jack" +a.py:1: note: Hint: "python3 -m pip install types-JACK-Client" +a.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports + +[case testIgnoreErrorsFromTypeshed] +# flags: --custom-typeshed-dir tmp/ts --follow-imports=normal +# cmd1: mypy a.py +# cmd2: mypy a.py + +[file a.py] +import foobar + +[file ts/stdlib/abc.pyi] +[file ts/stdlib/builtins.pyi] +class object: pass +class str: pass +class ellipsis: pass +[file ts/stdlib/sys.pyi] +[file ts/stdlib/types.pyi] +[file ts/stdlib/typing.pyi] +def cast(x): ... +[file ts/stdlib/typing_extensions.pyi] +[file ts/stdlib/VERSIONS] +[file ts/stubs/mypy_extensions/mypy_extensions.pyi] + +[file ts/stdlib/foobar.pyi.2] +# We report no errors from typeshed. It would be better to test ignoring +# errors from PEP 561 packages, but it's harder to test and uses the +# same code paths, so we are using typeshed instead. 
+import baz +import zar +undefined + +[file ts/stdlib/baz.pyi.2] +import whatever +undefined + +[out] +a.py:1: error: Cannot find implementation or library stub for module named "foobar" a.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports +== diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index aa53c6482449..58339828677d 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -24,7 +24,7 @@ -- as changed in the initial run with the cache while modules that depended on them -- should be. -- --- Modules that are require a full-module reprocessing by update can be checked with +-- Modules that require a full-module reprocessing by update can be checked with -- [rechecked ...]. This should include any files detected as having changed as well -- as any files that contain targets that need to be reprocessed but which haven't -- been loaded yet. If there is no [rechecked...] directive, it inherits the value of @@ -1586,11 +1586,11 @@ class A: [file b.py.3] 2 [out] -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:3: error: Method must have at least one argument +a.py:3: error: Method must have at least one argument. Did you forget the "self" argument? [case testBaseClassDeleted] import m @@ -1809,14 +1809,14 @@ def f() -> Iterator[None]: [typing fixtures/typing-medium.pyi] [builtins fixtures/list.pyi] [triggered] -2: , __main__ -3: , __main__, a +2: , , __main__ +3: , , __main__, a [out] main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" == +main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" a.py:3: error: Cannot find implementation or library stub for module named "b" a.py:3: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" == main:2: note: Revealed type is "contextlib.GeneratorContextManager[None]" @@ -2007,11 +2007,11 @@ class A: class A: def foo(self) -> int: pass [out] -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? == -a.py:2: error: Method must have at least one argument +a.py:2: error: Method must have at least one argument. Did you forget the "self" argument? 
== [case testPreviousErrorInMethodSemanal2] @@ -2968,7 +2968,7 @@ class M(type): pass [out] == -a.py:3: error: Inconsistent metaclass structure for "D" +a.py:3: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [case testFineMetaclassDeclaredUpdate] import a @@ -2984,7 +2984,7 @@ class M(type): pass class M2(type): pass [out] == -a.py:3: error: Inconsistent metaclass structure for "D" +a.py:3: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [case testFineMetaclassRemoveFromClass] import a @@ -3124,7 +3124,6 @@ whatever: int [out] == b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" [case testFixMissingMetaclass] import a @@ -3143,7 +3142,6 @@ class M(type): x: int [out] b.py:2: error: Name "c.M" is not defined -a.py:3: error: "Type[B]" has no attribute "x" == [case testGoodMetaclassSpoiled] @@ -3449,7 +3447,6 @@ f(a.x) == [case testNamedTupleUpdate5] -# flags: --enable-recursive-aliases import b [file a.py] from typing import NamedTuple, Optional @@ -3503,7 +3500,7 @@ def foo() -> None: b.py:4: error: Incompatible types in assignment (expression has type "str", variable has type "int") [case testNamedTupleUpdateNonRecursiveToRecursiveFine] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -3546,7 +3543,7 @@ c.py:5: error: Incompatible types in assignment (expression has type "Optional[N c.py:7: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int, fallback=b.M], None], builtins.int, fallback=a.N]" [case testTupleTypeUpdateNonRecursiveToRecursiveFine] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -3579,7 +3576,7 @@ c.py:4: note: Revealed type is "Tuple[Union[Tuple[Union[..., None], builtins.int c.py:5: error: Incompatible types in assignment (expression has type "Optional[N]", variable has type "int") [case testTypeAliasUpdateNonRecursiveToRecursiveFine] -# flags: --enable-recursive-aliases +# flags: --strict-optional import c [file a.py] from b import M @@ -3668,7 +3665,6 @@ def foo(x: Point) -> int: b.py:3: error: Unsupported operand types for + ("int" and "str") [case testTypedDictUpdate3] -# flags: --enable-recursive-aliases import b [file a.py] from mypy_extensions import TypedDict @@ -4305,9 +4301,9 @@ y = 0 [file a.py.2] y = '' [out] -main:4: error: Need type annotation for "x" +main:4: error: Need type annotation for "x" (hint: "x: Optional[] = ...") == -main:4: error: Need type annotation for "x" +main:4: error: Need type annotation for "x" (hint: "x: Optional[] = ...") [case testNonePartialType2] import a @@ -4323,9 +4319,9 @@ y = 0 [file a.py.2] y = '' [out] -main:4: error: Need type annotation for "x" +main:4: error: Need type annotation for "x" (hint: "x: Optional[] = ...") == -main:4: error: Need type annotation for "x" +main:4: error: Need type annotation for "x" (hint: "x: Optional[] = ...") [case testNonePartialType3] import a @@ -4337,7 +4333,7 @@ def f() -> None: y = '' [out] == -a.py:1: error: Need type annotation for "y" +a.py:1: error: Need type annotation for "y" (hint: "y: Optional[] = ...") [case testNonePartialType4] import a @@ -4353,7 +4349,7 @@ def f() -> None: global y y = '' [out] -a.py:1: error: Need type annotation for "y" +a.py:1: error: Need type annotation for "y" (hint: "y: Optional[] = ...") == [case testSkippedClass1] @@ 
-5527,11 +5523,13 @@ a.py:5: error: Argument 1 to "f" has incompatible type "C"; expected "int" import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5544,25 +5542,27 @@ def T() -> None: pass [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Function "a.T" is not valid as a type -main:4: note: Perhaps you need "Callable[...]" or a callback protocol? -main:6: error: Free type variable expected in Generic[...] -main:7: error: Function "a.T" is not valid as a type -main:7: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Function "a.T" is not valid as a type -main:10: note: Perhaps you need "Callable[...]" or a callback protocol? -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Function "a.T" is not valid as a type +main:6: note: Perhaps you need "Callable[...]" or a callback protocol? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Function "a.T" is not valid as a type +main:9: note: Perhaps you need "Callable[...]" or a callback protocol? +main:12: error: Function "a.T" is not valid as a type +main:12: note: Perhaps you need "Callable[...]" or a callback protocol? +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeVarToModule] import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5576,12 +5576,15 @@ import T [out] == == -main:4: error: "C" expects no type arguments, but 1 given -main:4: error: Module "T" is not valid as a type -main:6: error: Free type variable expected in Generic[...] -main:7: error: Module "T" is not valid as a type -main:10: error: Module "T" is not valid as a type -main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:6: error: Module "T" is not valid as a type +main:6: note: Perhaps you meant to use a protocol matching the module structure? +main:9: error: "C" expects no type arguments, but 1 given +main:9: error: Module "T" is not valid as a type +main:9: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Module "T" is not valid as a type +main:12: note: Perhaps you meant to use a protocol matching the module structure? +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeClassToModule] @@ -5604,18 +5607,22 @@ import C == == main:3: error: Module "C" is not valid as a type +main:3: note: Perhaps you meant to use a protocol matching the module structure? main:5: error: Module not callable main:8: error: Module "C" is not valid as a type +main:8: note: Perhaps you meant to use a protocol matching the module structure? [case testChangeTypeVarToTypeAlias] import a from typing import Generic -Alias = C[C[a.T]] class C(Generic[a.T]): def meth(self, x: a.T) -> None: pass + +Alias = C[C[a.T]] + def outer() -> None: def func(x: a.T) -> Alias[a.T]: pass @@ -5627,9 +5634,9 @@ from typing import TypeVar T = int [out] == -main:4: error: "C" expects no type arguments, but 1 given -main:6: error: Free type variable expected in Generic[...] 
-main:10: error: Bad number of arguments for type alias, expected: 0, given: 1 +main:5: error: Free type variable expected in Generic[...] +main:9: error: "C" expects no type arguments, but 1 given +main:12: error: Bad number of arguments for type alias, expected: 0, given: 1 [case testChangeTypeAliasToModule] @@ -5655,8 +5662,10 @@ import D == == main:3: error: Module "D" is not valid as a type +main:3: note: Perhaps you meant to use a protocol matching the module structure? main:5: error: Module not callable main:8: error: Module "D" is not valid as a type +main:8: note: Perhaps you meant to use a protocol matching the module structure? [case testChangeTypeAliasToModuleUnqualified] @@ -5682,8 +5691,10 @@ import D == == main:3: error: Module "D" is not valid as a type +main:3: note: Perhaps you meant to use a protocol matching the module structure? main:5: error: Module not callable main:8: error: Module "D" is not valid as a type +main:8: note: Perhaps you meant to use a protocol matching the module structure? [case testChangeFunctionToVariableAndRefreshUsingStaleDependency] import a @@ -7942,7 +7953,7 @@ class Foo(a.I): == [case testImplicitOptionalRefresh1] -# flags: --strict-optional +# flags: --strict-optional --implicit-optional from x import f def foo(x: int = None) -> None: f() @@ -8194,6 +8205,7 @@ x = 1 == [case testIdLikeDecoForwardCrashAlias] +# flags: --disable-error-code used-before-def import b [file b.py] from typing import Callable, Any, TypeVar @@ -8691,8 +8703,8 @@ main:2: note: Revealed type is "builtins.int" == main:2: note: Revealed type is "Literal[1]" == -mod.py:2: error: Incompatible types in assignment (expression has type "Literal[2]", variable has type "Literal[1]") main:2: note: Revealed type is "Literal[1]" +mod.py:2: error: Incompatible types in assignment (expression has type "Literal[2]", variable has type "Literal[1]") [case testLiteralFineGrainedFunctionConversion] from mod import foo @@ -9180,10 +9192,10 @@ a.py:1: error: Type signature has too few arguments a.py:5: error: Type signature has too few arguments a.py:11: error: Type signature has too few arguments == +c.py:1: error: Type signature has too few arguments a.py:1: error: Type signature has too few arguments a.py:5: error: Type signature has too few arguments a.py:11: error: Type signature has too few arguments -c.py:1: error: Type signature has too few arguments [case testErrorReportingNewAnalyzer] # flags: --disallow-any-generics @@ -9795,11 +9807,11 @@ class ExampleClass(Generic[T]): [case testDataclassCheckTypeVarBoundsInReprocess] # flags: --python-version 3.7 from dataclasses import dataclass -from typing import Protocol, Dict, TypeVar, Generic +from typing import ClassVar, Protocol, Dict, TypeVar, Generic from m import x class DataclassProtocol(Protocol): - __dataclass_fields__: Dict + __dataclass_fields__: ClassVar[Dict] T = TypeVar("T", bound=DataclassProtocol) @@ -9818,3 +9830,514 @@ x: str [builtins fixtures/dataclasses.pyi] [out] == + +[case testParamSpecCached] +import a + +[file a.py] +import b + +def f(x: int) -> str: return 'x' + +b.foo(f) + +[file a.py.2] +import b + +def f(x: int) -> str: return 'x' + +reveal_type(b.foo(f)) + +[file b.py] +from typing import TypeVar, Callable, Union +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + +def foo(f: Callable[P, T]) -> Callable[P, Union[T, None]]: + return f + +[file b.py.2] +from typing import TypeVar, Callable, Union +from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + 
+def foo(f: Callable[P, T]) -> Callable[P, Union[T, None]]: + return f + +x = 0 # Arbitrary change to trigger reprocessing + +[builtins fixtures/dict.pyi] +[out] +== +a.py:5: note: Revealed type is "def (x: builtins.int) -> builtins.str" + +[case testTypeVarTupleCached] +import a + +[file a.py] +import b + +def f(x: int) -> str: return 'x' + +b.foo((1, 'x')) + +[file a.py.2] +import b + +reveal_type(b.foo((1, 'x'))) + +[file b.py] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +def foo(t: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: + return t + +[file b.py.2] +from typing import Tuple +from typing_extensions import TypeVarTuple, Unpack + +Ts = TypeVarTuple("Ts") + +def foo(t: Tuple[Unpack[Ts]]) -> Tuple[Unpack[Ts]]: + return t + +x = 0 # Arbitrary change to trigger reprocessing +[builtins fixtures/dict.pyi] +[out] +== +a.py:3: note: Revealed type is "Tuple[Literal[1]?, Literal['x']?]" + +[case testUnpackKwargsUpdateFine] +import m +[file shared.py] +from typing_extensions import TypedDict + +class Person(TypedDict): + name: str + age: int + +[file shared.py.2] +from typing_extensions import TypedDict + +class Person(TypedDict): + name: str + age: str + +[file lib.py] +from typing_extensions import Unpack +from shared import Person + +def foo(**kwargs: Unpack[Person]): + ... +[file m.py] +from lib import foo +foo(name='Jennifer', age=38) + +[builtins fixtures/dict.pyi] +[out] +== +m.py:2: error: Argument "age" to "foo" has incompatible type "int"; expected "str" + +[case testModuleAsProtocolImplementationFine] +import m +[file m.py] +from typing import Protocol +from lib import C + +class Options(Protocol): + timeout: int + def update(self) -> bool: ... + +def setup(options: Options) -> None: ... +setup(C().config) + +[file lib.py] +import default_config + +class C: + config = default_config + +[file default_config.py] +timeout = 100 +def update() -> bool: ... + +[file default_config.py.2] +timeout = 100 +def update() -> str: ... +[builtins fixtures/module.pyi] +[out] +== +m.py:9: error: Argument 1 to "setup" has incompatible type Module; expected "Options" +m.py:9: note: Following member(s) of Module "default_config" have conflicts: +m.py:9: note: Expected: +m.py:9: note: def update() -> bool +m.py:9: note: Got: +m.py:9: note: def update() -> str + +[case testBoundGenericMethodFine] +import main +[file main.py] +import lib +[file main.py.3] +import lib +reveal_type(lib.foo(42)) +[file lib/__init__.pyi] +from lib import context +foo = context.test.foo +[file lib/context.pyi] +from typing import TypeVar +import lib.other + +T = TypeVar("T") +class Test: + def foo(self, x: T, n: lib.other.C = ...) -> T: ... +test: Test + +[file lib/other.pyi] +class C: ... +[file lib/other.pyi.2] +class B: ... +class C(B): ... +[out] +== +== +main.py:2: note: Revealed type is "builtins.int" + +[case testBoundGenericMethodParamSpecFine] +import main +[file main.py] +import lib +[file main.py.3] +from typing import Callable +import lib +f: Callable[[], int] +reveal_type(lib.foo(f)) +[file lib/__init__.pyi] +from lib import context +foo = context.test.foo +[file lib/context.pyi] +from typing_extensions import ParamSpec +from typing import Callable +import lib.other + +P = ParamSpec("P") +class Test: + def foo(self, x: Callable[P, int], n: lib.other.C = ...) -> Callable[P, str]: ... +test: Test + +[file lib/other.pyi] +class C: ... +[file lib/other.pyi.2] +class B: ... +class C(B): ... 
+[builtins fixtures/dict.pyi] +[out] +== +== +main.py:4: note: Revealed type is "def () -> builtins.str" + +[case testAbstractBodyTurnsEmpty] +# flags: --strict-optional +from b import Base + +class Sub(Base): + def meth(self) -> int: + return super().meth() + +[file b.py] +from abc import abstractmethod +class Base: + @abstractmethod + def meth(self) -> int: return 0 + +[file b.py.2] +from abc import abstractmethod +class Base: + @abstractmethod + def meth(self) -> int: ... +[out] +== +main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe + +[case testAbstractBodyTurnsEmptyProtocol] +# flags: --strict-optional +from b import Base + +class Sub(Base): + def meth(self) -> int: + return super().meth() + +[file b.py] +from typing import Protocol +class Base(Protocol): + def meth(self) -> int: return 0 +[file b.py.2] +from typing import Protocol +class Base(Protocol): + def meth(self) -> int: ... +[out] +== +main:6: error: Call to abstract method "meth" of "Base" with trivial body via super() is unsafe + +[case testPrettyMessageSorting] +# flags: --pretty +import a + +[file a.py] +1 + '' +import b + +[file b.py] +object + 1 + +[file b.py.2] +object + 1 +1() + +[out] +b.py:1: error: Unsupported left operand type for + ("Type[object]") + object + 1 + ^ +a.py:1: error: Unsupported operand types for + ("int" and "str") + 1 + '' + ^ +== +b.py:1: error: Unsupported left operand type for + ("Type[object]") + object + 1 + ^ +b.py:2: error: "int" not callable + 1() + ^ +a.py:1: error: Unsupported operand types for + ("int" and "str") + 1 + '' + ^ +[out version>=3.8] +b.py:1: error: Unsupported left operand type for + ("Type[object]") + object + 1 + ^~~~~~~~~~ +a.py:1: error: Unsupported operand types for + ("int" and "str") + 1 + '' + ^~ +== +b.py:1: error: Unsupported left operand type for + ("Type[object]") + object + 1 + ^~~~~~~~~~ +b.py:2: error: "int" not callable + 1() + ^~~ +a.py:1: error: Unsupported operand types for + ("int" and "str") + 1 + '' + ^~ + +[case testTypingSelfFine] +import m +[file lib.py] +from typing import Any + +class C: + def meth(self, other: Any) -> C: ... +[file lib.py.2] +from typing import Self + +class C: + def meth(self, other: Self) -> Self: ... + +[file n.py] +import lib +class D(lib.C): ... +[file m.py] +from n import D +d = D() +def test() -> None: + d.meth(42) +[out] +== +m.py:4: error: Argument 1 to "meth" of "C" has incompatible type "int"; expected "D" + +[case testNoNestedDefinitionCrash] +import m +[file m.py] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +[file m.py.2] +from typing import Any, TYPE_CHECKING + +class C: + if TYPE_CHECKING: + def __init__(self, **kw: Any): ... + +C +# change +[builtins fixtures/dict.pyi] +[out] +== + +[case testNoNestedDefinitionCrash2] +import m +[file m.py] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... + except: + pass + +C +[file m.py.2] +from typing import Any + +class C: + try: + def __init__(self, **kw: Any): ... + except: + pass + +C +# change +[builtins fixtures/dict.pyi] +[out] +== + +[case testNamedTupleNestedCrash] +import m +[file m.py] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... + x: int + y: int + +[file m.py.2] +from typing import NamedTuple + +class NT(NamedTuple): + class C: ... 
+ x: int + y: int +# change +[builtins fixtures/tuple.pyi] +[out] +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:4: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testNamedTupleNestedClassRecheck] +import n +[file n.py] +import m +x: m.NT +[file m.py] +from typing import NamedTuple +from f import A + +class NT(NamedTuple): + class C: ... + x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/tuple.pyi] +[out] +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" +== +m.py:5: error: Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]" + +[case testTypedDictNestedClassRecheck] +import n +[file n.py] +import m +x: m.TD +[file m.py] +from typing_extensions import TypedDict +from f import A + +class TD(TypedDict): + class C: ... + x: int + y: A + +[file f.py] +A = int +[file f.py.2] +A = str +[builtins fixtures/dict.pyi] +[out] +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" +== +m.py:5: error: Invalid statement in TypedDict definition; expected "field_name: field_type" + +[case testTypeAliasWithNewStyleUnionChangedToVariable] +# flags: --python-version 3.10 +import a + +[file a.py] +from b import C, D +A = C | D +a: A +reveal_type(a) + +[file b.py] +C = int +D = str + +[file b.py.2] +C = "x" +D = "y" + +[file b.py.3] +C = str +D = int +[out] +a.py:4: note: Revealed type is "Union[builtins.int, builtins.str]" +== +a.py:2: error: Unsupported left operand type for | ("str") +a.py:3: error: Variable "a.A" is not valid as a type +a.py:3: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases +a.py:4: note: Revealed type is "A?" +== +a.py:4: note: Revealed type is "Union[builtins.str, builtins.int]" + +[case testUnionOfSimilarCallablesCrash] +import b + +[file b.py] +from a import x + +[file m.py] +from typing import Union, TypeVar + +T = TypeVar("T") +S = TypeVar("S") +def foo(x: T, y: S) -> Union[T, S]: ... +def f(x: int) -> int: ... +def g(*x: int) -> int: ... 
+ +[file a.py] +from m import f, g, foo +x = foo(f, g) + +[file a.py.2] +from m import f, g, foo +x = foo(f, g) +reveal_type(x) +[builtins fixtures/tuple.pyi] +[out] +== +a.py:3: note: Revealed type is "Union[def (x: builtins.int) -> builtins.int, def (*x: builtins.int) -> builtins.int]" diff --git a/test-data/unit/fixtures/__init_subclass__.pyi b/test-data/unit/fixtures/__init_subclass__.pyi index c5a17f60688e..b4618c28249e 100644 --- a/test-data/unit/fixtures/__init_subclass__.pyi +++ b/test-data/unit/fixtures/__init_subclass__.pyi @@ -11,3 +11,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/__new__.pyi b/test-data/unit/fixtures/__new__.pyi index bb4788df8fe9..401de6fb9cd1 100644 --- a/test-data/unit/fixtures/__new__.pyi +++ b/test-data/unit/fixtures/__new__.pyi @@ -16,3 +16,4 @@ class int: pass class bool: pass class str: pass class function: pass +class dict: pass diff --git a/test-data/unit/fixtures/alias.pyi b/test-data/unit/fixtures/alias.pyi index 08b145f4efd1..2ec7703f00c4 100644 --- a/test-data/unit/fixtures/alias.pyi +++ b/test-data/unit/fixtures/alias.pyi @@ -12,3 +12,5 @@ class str: pass class function: pass bytes = str + +class dict: pass diff --git a/test-data/unit/fixtures/any.pyi b/test-data/unit/fixtures/any.pyi index d6d90b7b3e98..b1f8d83bf524 100644 --- a/test-data/unit/fixtures/any.pyi +++ b/test-data/unit/fixtures/any.pyi @@ -6,3 +6,5 @@ class int: pass class str: pass def any(i: Iterable[T]) -> bool: pass + +class dict: pass diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi index 8d0ecc00f4b6..9985ccf84817 100644 --- a/test-data/unit/fixtures/args.pyi +++ b/test-data/unit/fixtures/args.pyi @@ -26,6 +26,7 @@ class list(Sequence[T], Generic[T]): pass class int: def __eq__(self, o: object) -> bool: pass +class float: pass class str: pass class bytes: pass class bool: pass diff --git a/test-data/unit/fixtures/attr.pyi b/test-data/unit/fixtures/attr.pyi index c209abfef0d9..3bd4f0ec7cbe 100644 --- a/test-data/unit/fixtures/attr.pyi +++ b/test-data/unit/fixtures/attr.pyi @@ -23,6 +23,7 @@ class complex: def __init__(self, real: str = ...) -> None: ... class str: pass -class unicode: pass class ellipsis: pass class tuple: pass +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/bool.pyi b/test-data/unit/fixtures/bool.pyi index 245526d78907..bc58a22b952b 100644 --- a/test-data/unit/fixtures/bool.pyi +++ b/test-data/unit/fixtures/bool.pyi @@ -14,7 +14,7 @@ class int: pass class bool(int): pass class float: pass class str: pass -class unicode: pass class ellipsis: pass -class list: pass +class list(Generic[T]): pass class property: pass +class dict: pass diff --git a/test-data/unit/fixtures/bool_py2.pyi b/test-data/unit/fixtures/bool_py2.pyi deleted file mode 100644 index b2c935132d57..000000000000 --- a/test-data/unit/fixtures/bool_py2.pyi +++ /dev/null @@ -1,16 +0,0 @@ -# builtins stub used in boolean-related test cases. 
-from typing import Generic, TypeVar -import sys -T = TypeVar('T') - -class object: - def __init__(self) -> None: pass - -class type: pass -class tuple(Generic[T]): pass -class function: pass -class bool: pass -class int: pass -class str: pass -class unicode: pass -class ellipsis: pass diff --git a/test-data/unit/fixtures/callable.pyi b/test-data/unit/fixtures/callable.pyi index 4ad72bee93ec..44abf0691ceb 100644 --- a/test-data/unit/fixtures/callable.pyi +++ b/test-data/unit/fixtures/callable.pyi @@ -28,3 +28,4 @@ class str: def __eq__(self, other: 'str') -> bool: pass class ellipsis: pass class list: ... +class dict: pass diff --git a/test-data/unit/fixtures/classmethod.pyi b/test-data/unit/fixtures/classmethod.pyi index 03ad803890a3..97e018b1dc1c 100644 --- a/test-data/unit/fixtures/classmethod.pyi +++ b/test-data/unit/fixtures/classmethod.pyi @@ -26,3 +26,6 @@ class bool: pass class ellipsis: pass class tuple(typing.Generic[_T]): pass + +class list: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex.pyi b/test-data/unit/fixtures/complex.pyi index bcd03a2562e5..880ec3dd4d9d 100644 --- a/test-data/unit/fixtures/complex.pyi +++ b/test-data/unit/fixtures/complex.pyi @@ -10,3 +10,4 @@ class int: pass class float: pass class complex: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/complex_tuple.pyi b/test-data/unit/fixtures/complex_tuple.pyi index 6be46ac34573..81f1d33d1207 100644 --- a/test-data/unit/fixtures/complex_tuple.pyi +++ b/test-data/unit/fixtures/complex_tuple.pyi @@ -13,3 +13,4 @@ class float: pass class complex: pass class str: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index 206843a88b24..e9394c84ba7d 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -10,6 +10,7 @@ VT = TypeVar('VT') class object: def __init__(self) -> None: pass + def __init_subclass__(cls) -> None: pass def __eq__(self, o: object) -> bool: pass def __ne__(self, o: object) -> bool: pass @@ -18,6 +19,7 @@ class ellipsis: pass class tuple(Generic[_T]): pass class int: pass class float: pass +class bytes: pass class str: pass class bool(int): pass @@ -37,7 +39,11 @@ class dict(Mapping[KT, VT]): def get(self, k: KT, default: Union[KT, _T]) -> Union[VT, _T]: pass def __len__(self) -> int: ... -class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> int: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + class function: pass class classmethod: pass property = object() diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index f4ec15e4fa9a..153832411f50 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -29,7 +29,7 @@ class dict(Mapping[KT, VT]): @overload def get(self, k: KT) -> Optional[VT]: pass @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass + def get(self, k: KT, default: Union[VT, T]) -> Union[VT, T]: pass def __len__(self) -> int: ... 
class int: # for convenience @@ -41,7 +41,6 @@ class int: # for convenience imag: int class str: pass # for keyword argument key type -class unicode: pass # needed for py2 docstrings class bytes: pass class list(Sequence[T]): # needed by some test cases diff --git a/test-data/unit/fixtures/divmod.pyi b/test-data/unit/fixtures/divmod.pyi index cf41c500f49b..4d81d8fb47a2 100644 --- a/test-data/unit/fixtures/divmod.pyi +++ b/test-data/unit/fixtures/divmod.pyi @@ -19,3 +19,5 @@ class ellipsis: pass _N = TypeVar('_N', int, float) def divmod(_x: _N, _y: _N) -> Tuple[_N, _N]: ... + +class dict: pass diff --git a/test-data/unit/fixtures/exception.pyi b/test-data/unit/fixtures/exception.pyi index bf6d21c8716e..08496e4e5934 100644 --- a/test-data/unit/fixtures/exception.pyi +++ b/test-data/unit/fixtures/exception.pyi @@ -1,3 +1,4 @@ +import sys from typing import Generic, TypeVar T = TypeVar('T') @@ -5,19 +6,24 @@ class object: def __init__(self): pass class type: pass -class tuple(Generic[T]): pass +class tuple(Generic[T]): + def __ge__(self, other: object) -> bool: ... +class list: pass +class dict: pass class function: pass class int: pass class str: pass -class unicode: pass class bool: pass class ellipsis: pass -# Note: this is a slight simplification. In Python 2, the inheritance hierarchy -# is actually Exception -> StandardError -> RuntimeError -> ... class BaseException: def __init__(self, *args: object) -> None: ... class Exception(BaseException): pass class RuntimeError(Exception): pass class NotImplementedError(RuntimeError): pass +if sys.version_info >= (3, 11): + _BT_co = TypeVar("_BT_co", bound=BaseException, covariant=True) + _T_co = TypeVar("_T_co", bound=Exception, covariant=True) + class BaseExceptionGroup(BaseException, Generic[_BT_co]): ... + class ExceptionGroup(BaseExceptionGroup[_T_co], Exception): ... diff --git a/test-data/unit/fixtures/f_string.pyi b/test-data/unit/fixtures/f_string.pyi index 78d39aee85b8..328c666b7ece 100644 --- a/test-data/unit/fixtures/f_string.pyi +++ b/test-data/unit/fixtures/f_string.pyi @@ -34,3 +34,5 @@ class str: def format(self, *args) -> str: pass def join(self, l: List[str]) -> str: pass + +class dict: pass diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi index b2e104ccfceb..e454a27a5ebd 100644 --- a/test-data/unit/fixtures/fine_grained.pyi +++ b/test-data/unit/fixtures/fine_grained.pyi @@ -27,3 +27,4 @@ class tuple(Generic[T]): pass class function: pass class ellipsis: pass class list(Generic[T]): pass +class dict: pass diff --git a/test-data/unit/fixtures/float.pyi b/test-data/unit/fixtures/float.pyi index 880b16a2321b..5db4525849c0 100644 --- a/test-data/unit/fixtures/float.pyi +++ b/test-data/unit/fixtures/float.pyi @@ -34,3 +34,5 @@ class float: def __int__(self) -> int: ... def __mul__(self, x: float) -> float: ... def __rmul__(self, x: float) -> float: ... 
+ +class dict: pass diff --git a/test-data/unit/fixtures/floatdict_python2.pyi b/test-data/unit/fixtures/floatdict_python2.pyi deleted file mode 100644 index f177355d5d4b..000000000000 --- a/test-data/unit/fixtures/floatdict_python2.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from typing import TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union - -T = TypeVar('T') -KT = TypeVar('KT') -VT = TypeVar('VT') - -Any = 0 - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: Any) -> None: pass - -class str: - def __add__(self, other: 'str') -> 'str': pass - def __rmul__(self, n: int) -> str: ... - -class unicode: pass - -class tuple(Generic[T]): pass -class slice: pass -class function: pass - -class ellipsis: pass - -class list(Iterable[T], Generic[T]): - @overload - def __init__(self) -> None: pass - @overload - def __init__(self, x: Iterable[T]) -> None: pass - def __iter__(self) -> Iterator[T]: pass - def __add__(self, x: list[T]) -> list[T]: pass - def __mul__(self, x: int) -> list[T]: pass - def __getitem__(self, x: int) -> T: pass - def append(self, x: T) -> None: pass - def extend(self, x: Iterable[T]) -> None: pass - -class dict(Mapping[KT, VT], Generic[KT, VT]): - @overload - def __init__(self, **kwargs: VT) -> None: pass - @overload - def __init__(self, arg: Iterable[Tuple[KT, VT]], **kwargs: VT) -> None: pass - def __setitem__(self, k: KT, v: VT) -> None: pass - def __getitem__(self, k: KT) -> VT: pass - def __iter__(self) -> Iterator[KT]: pass - def update(self, a: Mapping[KT, VT]) -> None: pass - @overload - def get(self, k: KT) -> Optional[VT]: pass - @overload - def get(self, k: KT, default: Union[KT, T]) -> Union[VT, T]: pass - - -class int: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: int) -> int: ... - def __rmul__(self, x: int) -> int: ... - def __truediv__(self, x: int) -> int: ... - def __rtruediv__(self, x: int) -> int: ... - -class float: - def __float__(self) -> float: ... - def __int__(self) -> int: ... - def __mul__(self, x: float) -> float: ... - def __rmul__(self, x: float) -> float: ... - def __truediv__(self, x: float) -> float: ... - def __rtruediv__(self, x: float) -> float: ... 
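The `fixtures/dict.pyi` hunk earlier in this section corrects the second `dict.get` overload so the default parameter unifies with the value type `VT` rather than the key type `KT`. A minimal sketch of the intended behavior (not part of the diff; the variable names are illustrative only):

```python
from typing import Dict, Optional, Union

counts: Dict[str, int] = {"a": 1}

a: int = counts.get("a", 0)                  # default matches VT -> result stays int
b: Union[int, str] = counts.get("b", "n/a")  # T = str -> Union[int, str]
c: Optional[int] = counts.get("c")           # one-argument overload -> Optional[int]
```

With the old `Union[KT, T]` annotation the fallback was tied to the key type, which does not match how `dict.get` actually behaves.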
diff --git a/test-data/unit/fixtures/for.pyi b/test-data/unit/fixtures/for.pyi index 31f6de78d486..694f83e940b2 100644 --- a/test-data/unit/fixtures/for.pyi +++ b/test-data/unit/fixtures/for.pyi @@ -18,3 +18,4 @@ class str: pass # for convenience class list(Iterable[t], Generic[t]): def __iter__(self) -> Iterator[t]: pass +class dict: pass diff --git a/test-data/unit/fixtures/function.pyi b/test-data/unit/fixtures/function.pyi index c00a7846628a..697d0d919d98 100644 --- a/test-data/unit/fixtures/function.pyi +++ b/test-data/unit/fixtures/function.pyi @@ -5,3 +5,4 @@ class type: pass class function: pass class int: pass class str: pass +class dict: pass diff --git a/test-data/unit/fixtures/isinstance.pyi b/test-data/unit/fixtures/isinstance.pyi index 7f7cf501b5de..c1125c24b941 100644 --- a/test-data/unit/fixtures/isinstance.pyi +++ b/test-data/unit/fixtures/isinstance.pyi @@ -14,6 +14,7 @@ class function: pass def isinstance(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass def issubclass(x: object, t: Union[Type[object], Tuple[Type[object], ...]]) -> bool: pass +def hasattr(x: object, name: str) -> bool: pass class int: def __add__(self, other: 'int') -> 'int': pass @@ -24,3 +25,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/isinstance_python3_10.pyi b/test-data/unit/fixtures/isinstance_python3_10.pyi index abb37ea81c00..7c919a216bfb 100644 --- a/test-data/unit/fixtures/isinstance_python3_10.pyi +++ b/test-data/unit/fixtures/isinstance_python3_10.pyi @@ -27,3 +27,5 @@ class str: class ellipsis: pass NotImplemented = cast(Any, None) + +class dict: pass diff --git a/test-data/unit/fixtures/list.pyi b/test-data/unit/fixtures/list.pyi index 31dc333b3d4f..90fbabe8bc92 100644 --- a/test-data/unit/fixtures/list.pyi +++ b/test-data/unit/fixtures/list.pyi @@ -36,3 +36,5 @@ class str: class bool(int): pass property = object() # Dummy definition. 
+ +class dict: pass diff --git a/test-data/unit/fixtures/module.pyi b/test-data/unit/fixtures/module.pyi index ac1d3688ed12..47408befd5ce 100644 --- a/test-data/unit/fixtures/module.pyi +++ b/test-data/unit/fixtures/module.pyi @@ -19,3 +19,5 @@ class ellipsis: pass classmethod = object() staticmethod = object() +property = object() +def hasattr(x: object, name: str) -> bool: pass diff --git a/test-data/unit/fixtures/module_all.pyi b/test-data/unit/fixtures/module_all.pyi index 87959fefbff5..b14152c7e98f 100644 --- a/test-data/unit/fixtures/module_all.pyi +++ b/test-data/unit/fixtures/module_all.pyi @@ -16,3 +16,4 @@ class list(Generic[_T], Sequence[_T]): def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass class tuple(Generic[_T]): pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/module_all_python2.pyi b/test-data/unit/fixtures/module_all_python2.pyi deleted file mode 100644 index 989333c5f41a..000000000000 --- a/test-data/unit/fixtures/module_all_python2.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Generic, Sequence, TypeVar -_T = TypeVar('_T') - -class object: - def __init__(self) -> None: pass -class type: pass -class function: pass -class int: pass -class str: pass -class unicode: pass -class list(Generic[_T], Sequence[_T]): - def append(self, x: _T): pass - def extend(self, x: Sequence[_T]): pass - def __add__(self, rhs: Sequence[_T]) -> list[_T]: pass -class tuple(Generic[_T]): pass diff --git a/test-data/unit/fixtures/notimplemented.pyi b/test-data/unit/fixtures/notimplemented.pyi index e619a6c5ad85..2ca376ea0760 100644 --- a/test-data/unit/fixtures/notimplemented.pyi +++ b/test-data/unit/fixtures/notimplemented.pyi @@ -11,3 +11,4 @@ class bool: pass class int: pass class str: pass NotImplemented = cast(Any, None) +class dict: pass diff --git a/test-data/unit/fixtures/object_hashable.pyi b/test-data/unit/fixtures/object_hashable.pyi index 592cba808cbf..49b17991f01c 100644 --- a/test-data/unit/fixtures/object_hashable.pyi +++ b/test-data/unit/fixtures/object_hashable.pyi @@ -7,3 +7,4 @@ class float: ... class str: ... class ellipsis: ... class tuple: ... 
+class dict: pass diff --git a/test-data/unit/fixtures/ops.pyi b/test-data/unit/fixtures/ops.pyi index d5845aba43c6..9cc4d22eb0a7 100644 --- a/test-data/unit/fixtures/ops.pyi +++ b/test-data/unit/fixtures/ops.pyi @@ -33,8 +33,6 @@ class str: def startswith(self, x: 'str') -> bool: pass def strip(self) -> 'str': pass -class unicode: pass - class int: def __add__(self, x: 'int') -> 'int': pass def __radd__(self, x: 'int') -> 'int': pass @@ -74,3 +72,5 @@ def __print(a1: object = None, a2: object = None, a3: object = None, a4: object = None) -> None: pass class ellipsis: pass + +class dict: pass diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index c72838535443..90d76b9d76dd 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -1,5 +1,5 @@ # builtins stub with non-generic primitive types -from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, overload +from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, Tuple, Union T = TypeVar('T') V = TypeVar('V') @@ -20,7 +20,9 @@ class int: def __rmul__(self, x: int) -> int: pass class float: def __float__(self) -> float: pass -class complex: pass + def __add__(self, x: float) -> float: pass +class complex: + def __add__(self, x: complex) -> complex: pass class bool(int): pass class str(Sequence[str]): def __add__(self, s: str) -> str: pass @@ -57,12 +59,11 @@ class function: pass class ellipsis: pass class range(Sequence[int]): - @overload - def __init__(self, stop: int) -> None: pass - @overload - def __init__(self, start: int, stop: int, step: int = ...) -> None: pass + def __init__(self, __x: int, __y: int = ..., __z: int = ...) -> None: pass def count(self, value: int) -> int: pass def index(self, value: int) -> int: pass def __getitem__(self, i: int) -> int: pass def __iter__(self) -> Iterator[int]: pass def __contains__(self, other: object) -> bool: pass + +def isinstance(x: object, t: Union[type, Tuple]) -> bool: pass diff --git a/test-data/unit/fixtures/property.pyi b/test-data/unit/fixtures/property.pyi index 9dca0d50a3be..2397c05c78d5 100644 --- a/test-data/unit/fixtures/property.pyi +++ b/test-data/unit/fixtures/property.pyi @@ -13,6 +13,7 @@ class function: pass property = object() # Dummy definition class classmethod: pass +class list: pass class dict: pass class int: pass class str: pass diff --git a/test-data/unit/fixtures/property_py2.pyi b/test-data/unit/fixtures/property_py2.pyi deleted file mode 100644 index 3b0ab69cf43f..000000000000 --- a/test-data/unit/fixtures/property_py2.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import typing - -_T = typing.TypeVar('_T') - -class object: - def __init__(self) -> None: pass - -class type: - def __init__(self, x: typing.Any) -> None: pass - -class function: pass - -property = object() # Dummy definition - -class int: pass -class str: pass -class unicode: pass -class bool: pass -class ellipsis: pass - -class tuple(typing.Generic[_T]): pass diff --git a/test-data/unit/fixtures/python2.pyi b/test-data/unit/fixtures/python2.pyi deleted file mode 100644 index 51af59c8bd45..000000000000 --- a/test-data/unit/fixtures/python2.pyi +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Generic, Iterable, TypeVar, Sequence, Iterator - -class object: - def __init__(self) -> None: pass - def __eq__(self, other: object) -> bool: pass - def __ne__(self, other: object) -> bool: pass - -class type: - def __init__(self, x) -> None: pass - -class function: pass - -class int: pass -class float: 
pass -class str: - def format(self, *args, **kwars) -> str: ... -class unicode: - def format(self, *args, **kwars) -> unicode: ... -class bool(int): pass - -bytes = str - -T = TypeVar('T') -S = TypeVar('S') -class list(Iterable[T], Generic[T]): - def __iter__(self) -> Iterator[T]: pass - def __getitem__(self, item: int) -> T: pass -class tuple(Iterable[T]): - def __iter__(self) -> Iterator[T]: pass -class dict(Generic[T, S]): pass - -class bytearray(Sequence[int]): - def __init__(self, string: str) -> None: pass - def __contains__(self, item: object) -> bool: pass - def __iter__(self) -> Iterator[int]: pass - def __getitem__(self, item: int) -> int: pass - -# Definition of None is implicit diff --git a/test-data/unit/fixtures/set.pyi b/test-data/unit/fixtures/set.pyi index 9852bbc9fcc6..71d3bd2eee18 100644 --- a/test-data/unit/fixtures/set.pyi +++ b/test-data/unit/fixtures/set.pyi @@ -6,6 +6,7 @@ T = TypeVar('T') class object: def __init__(self) -> None: pass + def __eq__(self, other): pass class type: pass class tuple(Generic[T]): pass @@ -24,3 +25,5 @@ class set(Iterable[T], Generic[T]): def add(self, x: T) -> None: pass def discard(self, x: T) -> None: pass def update(self, x: Set[T]) -> None: pass + +class dict: pass diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi index 947d49ea09fb..b5a4549da068 100644 --- a/test-data/unit/fixtures/slice.pyi +++ b/test-data/unit/fixtures/slice.pyi @@ -14,3 +14,4 @@ class str: pass class slice: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/staticmethod.pyi b/test-data/unit/fixtures/staticmethod.pyi index 7d5d98634e48..8a87121b2a71 100644 --- a/test-data/unit/fixtures/staticmethod.pyi +++ b/test-data/unit/fixtures/staticmethod.pyi @@ -16,6 +16,6 @@ class int: def from_bytes(bytes: bytes, byteorder: str) -> int: pass class str: pass -class unicode: pass class bytes: pass class ellipsis: pass +class dict: pass diff --git a/test-data/unit/fixtures/transform.pyi b/test-data/unit/fixtures/transform.pyi index afdc2bf5b59a..7dbb8fa90dbe 100644 --- a/test-data/unit/fixtures/transform.pyi +++ b/test-data/unit/fixtures/transform.pyi @@ -28,3 +28,5 @@ def __print(a1=None, a2=None, a3=None, a4=None): # Do not use *args since this would require list and break many test # cases. pass + +class dict: pass diff --git a/test-data/unit/fixtures/tuple-simple.pyi b/test-data/unit/fixtures/tuple-simple.pyi index b195dfa59729..6c816c1c5b7a 100644 --- a/test-data/unit/fixtures/tuple-simple.pyi +++ b/test-data/unit/fixtures/tuple-simple.pyi @@ -18,3 +18,4 @@ class function: pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 6f40356bb5f0..0261731304b1 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -1,6 +1,6 @@ # Builtins stub used in tuple-related test cases. 
-from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Any, overload, Tuple, Type +from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Optional, overload, Tuple, Type T = TypeVar("T") Tco = TypeVar('Tco', covariant=True) @@ -23,7 +23,8 @@ class tuple(Sequence[Tco], Generic[Tco]): def __rmul__(self, n: int) -> Tuple[Tco, ...]: pass def __add__(self, x: Tuple[Tco, ...]) -> Tuple[Tco, ...]: pass def count(self, obj: object) -> int: pass -class function: pass +class function: + __name__: str class ellipsis: pass class classmethod: pass @@ -35,7 +36,7 @@ class slice: pass class bool(int): pass class str: pass # For convenience class bytes: pass -class unicode: pass +class bytearray: pass class list(Sequence[T], Generic[T]): @overload @@ -47,6 +48,8 @@ class list(Sequence[T], Generic[T]): def isinstance(x: object, t: type) -> bool: pass -def sum(iterable: Iterable[T], start: T = None) -> T: pass +def sum(iterable: Iterable[T], start: Optional[T] = None) -> T: pass class BaseException: pass + +class dict: pass diff --git a/test-data/unit/fixtures/type.pyi b/test-data/unit/fixtures/type.pyi index 755b45ff0bb5..39357a693638 100644 --- a/test-data/unit/fixtures/type.pyi +++ b/test-data/unit/fixtures/type.pyi @@ -1,8 +1,9 @@ # builtins stub used in type-related test cases. -from typing import Generic, TypeVar, List, Union +from typing import Any, Generic, TypeVar, List, Union -T = TypeVar('T') +T = TypeVar("T") +S = TypeVar("S") class object: def __init__(self) -> None: pass @@ -12,12 +13,15 @@ class list(Generic[T]): pass class type(Generic[T]): __name__: str + def __call__(self, *args: Any, **kwargs: Any) -> Any: pass def __or__(self, other: Union[type, None]) -> type: pass + def __ror__(self, other: Union[type, None]) -> type: pass def mro(self) -> List['type']: pass class tuple(Generic[T]): pass +class dict(Generic[T, S]): pass class function: pass class bool: pass class int: pass class str: pass -class unicode: pass +class ellipsis: pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index dad30dd7bcee..1471473249dc 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -160,8 +160,8 @@ class SupportsAbs(Protocol[T_co]): def runtime_checkable(cls: T) -> T: return cls -class ContextManager(Generic[T]): - def __enter__(self) -> T: pass +class ContextManager(Generic[T_co]): + def __enter__(self) -> T_co: pass # Use Any because not all the precise types are in the fixtures. def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass @@ -179,3 +179,14 @@ class _TypedDict(Mapping[str, object]): def pop(self, k: NoReturn, default: T = ...) -> object: ... def update(self: T, __m: T) -> None: ... def __delitem__(self, k: NoReturn) -> None: ... + +class _SpecialForm: pass + +def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, +) -> Callable[[T], T]: ... diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 568fe057c4cf..863b0703989d 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -68,4 +68,6 @@ class ContextManager(Generic[T]): # Use Any because not all the precise types are in the fixtures. 
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> Any: pass +class _SpecialForm: pass + TYPE_CHECKING = 1 diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi index 3404dc69de44..1a31549463b6 100644 --- a/test-data/unit/fixtures/typing-namedtuple.pyi +++ b/test-data/unit/fixtures/typing-namedtuple.pyi @@ -4,6 +4,8 @@ Any = 0 overload = 0 Type = 0 Literal = 0 +Optional = 0 +Self = 0 T_co = TypeVar('T_co', covariant=True) KT = TypeVar('KT') diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index 378570b4c19c..92ae402b9ea5 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -9,6 +9,7 @@ from abc import ABCMeta cast = 0 +assert_type = 0 overload = 0 Any = 0 Union = 0 @@ -25,6 +26,7 @@ TypedDict = 0 NoReturn = 0 Required = 0 NotRequired = 0 +Self = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/fixtures/union.pyi b/test-data/unit/fixtures/union.pyi index 489e3ddb6ef9..350e145a6f8f 100644 --- a/test-data/unit/fixtures/union.pyi +++ b/test-data/unit/fixtures/union.pyi @@ -15,3 +15,4 @@ class tuple(Generic[T]): pass # We need int for indexing tuples. class int: pass class str: pass # For convenience +class dict: pass diff --git a/test-data/unit/lib-stub/__builtin__.pyi b/test-data/unit/lib-stub/__builtin__.pyi deleted file mode 100644 index e7109a179aac..000000000000 --- a/test-data/unit/lib-stub/__builtin__.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Generic, TypeVar -_T = TypeVar('_T') - -Any = 0 - -class object: - def __init__(self): - # type: () -> None - pass - -class type: - def __init__(self, x): - # type: (Any) -> None - pass - -# These are provided here for convenience. -class int: pass -class float: pass - -class str: pass -class unicode: pass - -class tuple(Generic[_T]): pass -class function: pass - -class ellipsis: pass - -def print(*args, end=''): pass - -# Definition of None is implicit diff --git a/test-data/unit/lib-stub/_decimal.pyi b/test-data/unit/lib-stub/_decimal.pyi new file mode 100644 index 000000000000..2c2c5bff11f7 --- /dev/null +++ b/test-data/unit/lib-stub/_decimal.pyi @@ -0,0 +1,4 @@ +# Very simplified decimal stubs for use in tests + +class Decimal: + def __new__(cls, value: str = ...) -> Decimal: ... 
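The new `lib-stub/_decimal.pyi`, `decimal.pyi`, and `datetime.pyi` stubs above are deliberately tiny: they only model the constructors and `__format__` hook the tests exercise. A rough, hedged sketch of the kind of user code such minimal stubs can cover (the literal values here are arbitrary, not taken from the test data):

```python
from datetime import datetime
from decimal import Decimal

dt = datetime(2023, 1, 15, hour=12, fold=0)  # keyword-rich __new__ signature
label = f"{dt:%Y-%m-%d}"                     # f-string format specs go through __format__
price = Decimal("9.99")                      # Decimal.__new__(cls, value: str = ...)
```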
diff --git a/test-data/unit/lib-stub/abc.pyi b/test-data/unit/lib-stub/abc.pyi index da90b588fca3..e60f709a5187 100644 --- a/test-data/unit/lib-stub/abc.pyi +++ b/test-data/unit/lib-stub/abc.pyi @@ -2,8 +2,8 @@ from typing import Type, Any, TypeVar T = TypeVar('T', bound=Type[Any]) -class ABC(type): pass class ABCMeta(type): def register(cls, tp: T) -> T: pass +class ABC(metaclass=ABCMeta): pass abstractmethod = object() abstractproperty = object() diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index 8c4f504fb2e7..c2ac78c41661 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -17,11 +17,17 @@ class float: pass class str: pass class bytes: pass -class function: pass +class function: + __name__: str class ellipsis: pass -from typing import Generic, Sequence, TypeVar +from typing import Generic, Iterator, Sequence, TypeVar _T = TypeVar('_T') -class list(Generic[_T], Sequence[_T]): pass +class list(Generic[_T], Sequence[_T]): + def __contains__(self, item: object) -> bool: pass + def __getitem__(self, key: int) -> _T: pass + def __iter__(self) -> Iterator[_T]: pass + +class dict: pass # Definition of None is implicit diff --git a/test-data/unit/lib-stub/contextlib.pyi b/test-data/unit/lib-stub/contextlib.pyi index e7db25da1b5f..e2a0cccd562a 100644 --- a/test-data/unit/lib-stub/contextlib.pyi +++ b/test-data/unit/lib-stub/contextlib.pyi @@ -7,6 +7,7 @@ _T = TypeVar('_T') class GeneratorContextManager(ContextManager[_T], Generic[_T]): def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ... +# This does not match `typeshed` definition, needs `ParamSpec`: def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ... diff --git a/test-data/unit/lib-stub/datetime.pyi b/test-data/unit/lib-stub/datetime.pyi new file mode 100644 index 000000000000..7d71682d051d --- /dev/null +++ b/test-data/unit/lib-stub/datetime.pyi @@ -0,0 +1,16 @@ +# Very simplified datetime stubs for use in tests + +class datetime: + def __new__( + cls, + year: int, + month: int, + day: int, + hour: int = ..., + minute: int = ..., + second: int = ..., + microsecond: int = ..., + *, + fold: int = ..., + ) -> datetime: ... + def __format__(self, __fmt: str) -> str: ... diff --git a/test-data/unit/lib-stub/decimal.pyi b/test-data/unit/lib-stub/decimal.pyi new file mode 100644 index 000000000000..d2ab6eda9ff1 --- /dev/null +++ b/test-data/unit/lib-stub/decimal.pyi @@ -0,0 +1,3 @@ +# Very simplified decimal stubs for use in tests + +from _decimal import * diff --git a/test-data/unit/lib-stub/functools.pyi b/test-data/unit/lib-stub/functools.pyi new file mode 100644 index 000000000000..9e62a14c2f34 --- /dev/null +++ b/test-data/unit/lib-stub/functools.pyi @@ -0,0 +1,35 @@ +from typing import Generic, TypeVar, Callable, Any, Mapping + +_T = TypeVar("_T") + +class _SingleDispatchCallable(Generic[_T]): + registry: Mapping[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... 
+ def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... + +class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + def __class_getitem__(cls, item: Any) -> Any: ... diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi index 6274163c497d..d79be8719417 100644 --- a/test-data/unit/lib-stub/mypy_extensions.pyi +++ b/test-data/unit/lib-stub/mypy_extensions.pyi @@ -1,7 +1,7 @@ # NOTE: Requires fixtures/dict.pyi from typing import ( Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator, - Union + Union, Protocol ) import sys @@ -51,10 +51,13 @@ mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... if sys.version_info >= (3, 0): + class __SupportsInt(Protocol[T_co]): + def __int__(self) -> int: pass + _Int = Union[int, i32, i64] class i32: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i32) -> i32: ... def __radd__(self, x: i32) -> i32: ... def __sub__(self, x: i32) -> i32: ... @@ -84,7 +87,7 @@ if sys.version_info >= (3, 0): def __gt__(self, x: i32) -> bool: ... class i64: - def __init__(self, x: _Int) -> None: ... + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... def __add__(self, x: i64) -> i64: ... def __radd__(self, x: i64) -> i64: ... def __sub__(self, x: i64) -> i64: ... diff --git a/test-data/unit/lib-stub/traceback.pyi b/test-data/unit/lib-stub/traceback.pyi new file mode 100644 index 000000000000..83c1891f80f5 --- /dev/null +++ b/test-data/unit/lib-stub/traceback.pyi @@ -0,0 +1,3 @@ +# Very simplified traceback stubs for use in tests + +def print_tb(*args, **kwargs) -> None: ... diff --git a/test-data/unit/lib-stub/types.pyi b/test-data/unit/lib-stub/types.pyi index 4a6093f701cc..012fd8503377 100644 --- a/test-data/unit/lib-stub/types.pyi +++ b/test-data/unit/lib-stub/types.pyi @@ -1,4 +1,4 @@ -from typing import TypeVar +from typing import Any, TypeVar import sys _T = TypeVar('_T') @@ -6,7 +6,8 @@ _T = TypeVar('_T') def coroutine(func: _T) -> _T: pass class ModuleType: - __file__ = ... 
# type: str + __file__: str + def __getattr__(self, name: str) -> Any: pass if sys.version_info >= (3, 10): class Union: diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index 0a1bb42b936c..a306b70f74d7 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -27,6 +27,10 @@ NoReturn = 0 Never = 0 NewType = 0 ParamSpec = 0 +TypeVarTuple = 0 +Unpack = 0 +Self = 0 +TYPE_CHECKING = 0 T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index b82b73d49a71..22b895971521 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -1,15 +1,19 @@ -from typing import TypeVar, Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type +import typing +from typing import Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import NewType as NewType, overload as overload import sys -_T = TypeVar('_T') +_T = typing.TypeVar('_T') class _SpecialForm: def __getitem__(self, typeargs: Any) -> Any: pass + def __call__(self, arg: Any) -> Any: + pass + NamedTuple = 0 Protocol: _SpecialForm = ... def runtime_checkable(x: _T) -> _T: pass @@ -22,6 +26,8 @@ Literal: _SpecialForm = ... Annotated: _SpecialForm = ... +TypeVar: _SpecialForm + ParamSpec: _SpecialForm Concatenate: _SpecialForm @@ -51,3 +57,14 @@ class _TypedDict(Mapping[str, object]): def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... def reveal_type(__obj: T) -> T: pass + +def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, +) -> Callable[[T], T]: ... + +_FutureFeatureFixture = 0 diff --git a/test-data/unit/lib-stub/unannotated_lib.pyi b/test-data/unit/lib-stub/unannotated_lib.pyi new file mode 100644 index 000000000000..90bfb6fa47d6 --- /dev/null +++ b/test-data/unit/lib-stub/unannotated_lib.pyi @@ -0,0 +1 @@ +def f(x): ... 
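
The dataclass_transform entry added to the typing_extensions test stub above mirrors the PEP 681 decorator. A minimal usage sketch, assuming a hypothetical create_model decorator and CustomerModel class (neither is part of the patch):

    from typing import Type, TypeVar
    from typing_extensions import dataclass_transform

    T = TypeVar("T")

    @dataclass_transform(kw_only_default=True)
    def create_model(cls: Type[T]) -> Type[T]:
        # Illustrative only: a real implementation would synthesize __init__
        # from the class annotations, much like dataclasses.dataclass does.
        return cls

    @create_model
    class CustomerModel:
        id: int
        name: str

    CustomerModel(id=1, name="example")  # type checkers treat this as a keyword-only, dataclass-style __init__
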
diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index a593a064cbb2..144a095440f2 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -669,18 +669,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __match_args__<11> (Tuple[Literal['x']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -693,19 +693,19 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __match_args__<10> (Tuple[Literal['x'], Literal['y']]) - __new__<11> - _asdict<12> - _field_defaults<13> (builtins.object<1>) - _field_types<14> (builtins.object<1>) - _fields<15> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<16> - _replace<17> - _source<18> (builtins.str<9>) - x<19> (target.A<0>) - y<20> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __match_args__<11> (Tuple[Literal['x'], Literal['y']]) + __new__<12> + _asdict<13> + _field_defaults<14> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<15> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<16> (Tuple[builtins.str<8>, builtins.str<8>]) + _make<17> + _replace<18> + _source<19> (builtins.str<8>) + x<20> (target.A<0>) + y<21> (target.A<0>))) [case testNamedTupleOldVersion_typeinfo] import target @@ -730,17 +730,17 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>))) + __annotations__<7> (builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>))) ==> TypeInfo<0>( Name(target.A) @@ -753,18 +753,18 @@ TypeInfo<2>( Mro(target.N<2>, builtins.tuple<3>, typing.Sequence<4>, typing.Iterable<5>, builtins.object<1>) Names( _NT<6> - __annotations__<7> (builtins.object<1>) - __doc__<8> (builtins.str<9>) - __new__<10> - _asdict<11> - _field_defaults<12> (builtins.object<1>) - _field_types<13> (builtins.object<1>) - _fields<14> (Tuple[builtins.str<9>, builtins.str<9>]) - _make<15> - _replace<16> - _source<17> (builtins.str<9>) - x<18> (target.A<0>) - y<19> (target.A<0>))) + __annotations__<7> 
(builtins.dict[builtins.str<8>, Any]<9>) + __doc__<10> (builtins.str<8>) + __new__<11> + _asdict<12> + _field_defaults<13> (builtins.dict[builtins.str<8>, Any]<9>) + _field_types<14> (builtins.dict[builtins.str<8>, Any]<9>) + _fields<15> (Tuple[builtins.str<8>, builtins.str<8>]) + _make<16> + _replace<17> + _source<18> (builtins.str<8>) + x<19> (target.A<0>) + y<20> (target.A<0>))) [case testUnionType_types] import target diff --git a/test-data/unit/plugins/add_classmethod.py b/test-data/unit/plugins/add_classmethod.py new file mode 100644 index 000000000000..5aacc69a8f01 --- /dev/null +++ b/test-data/unit/plugins/add_classmethod.py @@ -0,0 +1,28 @@ +from typing import Callable, Optional + +from mypy.nodes import ARG_POS, Argument, Var +from mypy.plugin import ClassDefContext, Plugin +from mypy.plugins.common import add_method +from mypy.types import NoneType + + +class ClassMethodPlugin(Plugin): + def get_base_class_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + if "BaseAddMethod" in fullname: + return add_extra_methods_hook + return None + + +def add_extra_methods_hook(ctx: ClassDefContext) -> None: + add_method(ctx, "foo_classmethod", [], NoneType(), is_classmethod=True) + add_method( + ctx, + "foo_staticmethod", + [Argument(Var(""), ctx.api.named_type("builtins.int"), None, ARG_POS)], + ctx.api.named_type("builtins.str"), + is_staticmethod=True, + ) + + +def plugin(version): + return ClassMethodPlugin diff --git a/test-data/unit/plugins/customentry.py b/test-data/unit/plugins/customentry.py index f8b86c33dcfc..b3dacfd4cf44 100644 --- a/test-data/unit/plugins/customentry.py +++ b/test-data/unit/plugins/customentry.py @@ -4,7 +4,7 @@ class MyPlugin(Plugin): def get_function_hook(self, fullname): if fullname == '__main__.f': return my_hook - assert fullname is not None + assert fullname return None def my_hook(ctx): diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index 028d2aff561f..a3413e071184 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -660,8 +660,8 @@ a + 1 [out] _testMapStr.py:4: error: No overload variant of "__add__" of "list" matches argument type "int" _testMapStr.py:4: note: Possible overload variants: -_testMapStr.py:4: note: def __add__(self, List[str]) -> List[str] -_testMapStr.py:4: note: def [_S] __add__(self, List[_S]) -> List[Union[_S, str]] +_testMapStr.py:4: note: def __add__(self, List[str], /) -> List[str] +_testMapStr.py:4: note: def [_S] __add__(self, List[_S], /) -> List[Union[_S, str]] [case testRelativeImport] import typing @@ -805,8 +805,8 @@ t + 1 [out] _program.py:3: error: No overload variant of "__add__" of "tuple" matches argument type "int" _program.py:3: note: Possible overload variants: -_program.py:3: note: def __add__(self, Tuple[str, ...]) -> Tuple[str, ...] -_program.py:3: note: def [_T] __add__(self, Tuple[_T, ...]) -> Tuple[Union[str, _T], ...] +_program.py:3: note: def __add__(self, Tuple[str, ...], /) -> Tuple[str, ...] +_program.py:3: note: def [_T] __add__(self, Tuple[_T, ...], /) -> Tuple[Union[str, _T], ...] [case testMultiplyTupleByIntegerReverse] n = 4 @@ -815,8 +815,8 @@ t + 1 [out] _program.py:3: error: No overload variant of "__add__" of "tuple" matches argument type "int" _program.py:3: note: Possible overload variants: -_program.py:3: note: def __add__(self, Tuple[str, ...]) -> Tuple[str, ...] -_program.py:3: note: def [_T] __add__(self, Tuple[_T, ...]) -> Tuple[Union[str, _T], ...] 
+_program.py:3: note: def __add__(self, Tuple[str, ...], /) -> Tuple[str, ...] +_program.py:3: note: def [_T] __add__(self, Tuple[_T, ...], /) -> Tuple[Union[str, _T], ...] [case testDictWithKeywordArgs] from typing import Dict, Any, List @@ -1099,8 +1099,8 @@ _testTypedDictGet.py:8: note: Revealed type is "builtins.str" _testTypedDictGet.py:9: note: Revealed type is "builtins.object" _testTypedDictGet.py:10: error: All overload variants of "get" of "Mapping" require at least one argument _testTypedDictGet.py:10: note: Possible overload variants: -_testTypedDictGet.py:10: note: def get(self, str) -> object -_testTypedDictGet.py:10: note: def [_T] get(self, str, default: object) -> object +_testTypedDictGet.py:10: note: def get(self, str, /) -> object +_testTypedDictGet.py:10: note: def [_T] get(self, str, /, default: object) -> object _testTypedDictGet.py:12: note: Revealed type is "builtins.object" [case testTypedDictMappingMethods] @@ -1130,9 +1130,9 @@ _testTypedDictMappingMethods.py:5: note: Revealed type is "builtins.str" _testTypedDictMappingMethods.py:6: note: Revealed type is "typing.Iterator[builtins.str]" _testTypedDictMappingMethods.py:7: note: Revealed type is "builtins.int" _testTypedDictMappingMethods.py:8: note: Revealed type is "builtins.bool" -_testTypedDictMappingMethods.py:9: note: Revealed type is "typing.KeysView[builtins.str]" -_testTypedDictMappingMethods.py:10: note: Revealed type is "typing.ItemsView[builtins.str, builtins.object]" -_testTypedDictMappingMethods.py:11: note: Revealed type is "typing.ValuesView[builtins.object]" +_testTypedDictMappingMethods.py:9: note: Revealed type is "_collections_abc.dict_keys[builtins.str, builtins.object]" +_testTypedDictMappingMethods.py:10: note: Revealed type is "_collections_abc.dict_items[builtins.str, builtins.object]" +_testTypedDictMappingMethods.py:11: note: Revealed type is "_collections_abc.dict_values[builtins.str, builtins.object]" _testTypedDictMappingMethods.py:12: note: Revealed type is "TypedDict('_testTypedDictMappingMethods.Cell', {'value': builtins.int})" _testTypedDictMappingMethods.py:13: note: Revealed type is "builtins.int" _testTypedDictMappingMethods.py:15: error: Unexpected TypedDict key "invalid" @@ -1561,8 +1561,9 @@ import scribe # No Python 3 stubs available for scribe from scribe import x import maxminddb # Python 3 stubs available for maxminddb import foobar_asdf +import jack # This has a stubs package but was never bundled with mypy, so ignoring works [out] -_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "maxminddb" (or incompatible with Python 3.7) +_testIgnoreImportIfNoPython3StubAvailable.py:4: error: Library stubs not installed for "maxminddb" _testIgnoreImportIfNoPython3StubAvailable.py:4: note: Hint: "python3 -m pip install types-maxminddb" _testIgnoreImportIfNoPython3StubAvailable.py:4: note: (or run "mypy --install-types" to install all missing stub packages) _testIgnoreImportIfNoPython3StubAvailable.py:4: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports @@ -1574,7 +1575,7 @@ import maxminddb [out] _testNoPython3StubAvailable.py:1: error: Cannot find implementation or library stub for module named "scribe" _testNoPython3StubAvailable.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -_testNoPython3StubAvailable.py:3: error: Library stubs not installed for "maxminddb" (or incompatible with Python 3.7) +_testNoPython3StubAvailable.py:3: error: Library stubs not installed for 
"maxminddb" _testNoPython3StubAvailable.py:3: note: Hint: "python3 -m pip install types-maxminddb" _testNoPython3StubAvailable.py:3: note: (or run "mypy --install-types" to install all missing stub packages) @@ -1620,7 +1621,6 @@ _testEnumValueWithPlaceholderNodeType.py:6: error: Incompatible types in assignm _testEnumValueWithPlaceholderNodeType.py:6: error: Name "Missing" is not defined [case testTypeshedRecursiveTypesExample] -# flags: --enable-recursive-aliases from typing import List, Union Recursive = Union[str, List["Recursive"]] @@ -1636,3 +1636,334 @@ foo("") foo(list("")) foo(list((list(""), ""))) [out] + +[case testNarrowTypeForDictKeys] +# flags: --strict-optional +from typing import Dict, KeysView, Optional + +d: Dict[str, int] +key: Optional[str] +if key in d.keys(): + reveal_type(key) +else: + reveal_type(key) + +kv: KeysView[str] +k: Optional[str] +if k in kv: + reveal_type(k) +else: + reveal_type(k) + +[out] +_testNarrowTypeForDictKeys.py:7: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:9: note: Revealed type is "Union[builtins.str, None]" +_testNarrowTypeForDictKeys.py:14: note: Revealed type is "builtins.str" +_testNarrowTypeForDictKeys.py:16: note: Revealed type is "Union[builtins.str, None]" + +[case testTypeAliasWithNewStyleUnion] +# flags: --python-version 3.10 +from typing import Literal, Type, TypeAlias, TypeVar + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +C = type[int] | str + +D = type[int] | str +x: D +reveal_type(x) +E: TypeAlias = type[int] | str +y: E +reveal_type(y) +F = list[type[int] | str] + +T = TypeVar("T", int, str) +def foo(x: T) -> T: + A = type[int] | str + a: A + return x +[out] +_testTypeAliasWithNewStyleUnion.py:5: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnion.py:25: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnion.py:28: note: Revealed type is "Union[Type[builtins.int], builtins.str]" + +[case testTypeAliasWithNewStyleUnionInStub] +# flags: --python-version 3.7 +import m +a: m.A +reveal_type(a) +b: m.B +reveal_type(b) +c: m.C +reveal_type(c) +d: m.D +reveal_type(d) +e: m.E +reveal_type(e) +f: m.F +reveal_type(f) + +[file m.pyi] +from typing import Type, Callable +from typing_extensions import Literal, TypeAlias + +Foo = Literal[1, 2] +reveal_type(Foo) +Bar1 = Foo | Literal[3] +Bar2 = Literal[3] | Foo +Bar3 = Foo | Foo | Literal[3] | Foo + +U1 = int | str +U2 = U1 | bytes +U3 = bytes | U1 + +Opt1 = None | int +Opt2 = None | float +Opt3 = int | None +Opt4 = float | None + +A = Type[int] | str +B: TypeAlias = Type[int] | str +C = type[int] | str +reveal_type(C) +D: TypeAlias = type[int] | str +E = str | type[int] +F: TypeAlias = str | type[int] +G = list[type[int] | str] +H = list[str | type[int]] + +CU1 = int | Callable[[], str | bool] +CU2: TypeAlias = int | Callable[[], str | bool] +CU3 = int | Callable[[str | bool], str] +CU4: TypeAlias = int | Callable[[str | bool], str] +[out] +m.pyi:5: note: Revealed type is "typing._SpecialForm" +m.pyi:22: note: Revealed type is "typing._SpecialForm" +_testTypeAliasWithNewStyleUnionInStub.py:4: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:6: note: Revealed type is 
"Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:8: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:10: note: Revealed type is "Union[Type[builtins.int], builtins.str]" +_testTypeAliasWithNewStyleUnionInStub.py:12: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" +_testTypeAliasWithNewStyleUnionInStub.py:14: note: Revealed type is "Union[builtins.str, Type[builtins.int]]" + +[case testEnumNameWorkCorrectlyOn311] +# flags: --python-version 3.11 +import enum + +class E(enum.Enum): + X = 1 + Y = 2 + @enum.property + def foo(self) -> int: ... + +e: E +reveal_type(e.name) +reveal_type(e.value) +reveal_type(E.X.name) +reveal_type(e.foo) +reveal_type(E.Y.foo) +[out] +_testEnumNameWorkCorrectlyOn311.py:11: note: Revealed type is "builtins.str" +_testEnumNameWorkCorrectlyOn311.py:12: note: Revealed type is "Union[Literal[1]?, Literal[2]?]" +_testEnumNameWorkCorrectlyOn311.py:13: note: Revealed type is "Literal['X']?" +_testEnumNameWorkCorrectlyOn311.py:14: note: Revealed type is "builtins.int" +_testEnumNameWorkCorrectlyOn311.py:15: note: Revealed type is "builtins.int" + +[case testTypeAliasNotSupportedWithNewStyleUnion] +# flags: --python-version 3.9 +from typing_extensions import TypeAlias +A = type[int] | str +B = str | type[int] +C = str | int +D: TypeAlias = str | int +[out] +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:3: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:4: error: Value of type "Type[type]" is not indexable +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:5: error: Unsupported left operand type for | ("Type[str]") +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Invalid type alias: expression is not a valid type +_testTypeAliasNotSupportedWithNewStyleUnion.py:6: error: Unsupported left operand type for | ("Type[str]") + +[case testTypedDictUnionGetFull] +from typing import Dict +from typing_extensions import TypedDict + +class TD(TypedDict, total=False): + x: int + y: int + +A = Dict[str, TD] +x: A +def foo(k: str) -> TD: + reveal_type(x.get(k, {})) + return x.get(k, {}) +[out] +_testTypedDictUnionGetFull.py:11: note: Revealed type is "TypedDict('_testTypedDictUnionGetFull.TD', {'x'?: builtins.int, 'y'?: builtins.int})" + +[case testTupleWithDifferentArgsPy310] +# https://github.com/python/mypy/issues/11098 +# flags: --python-version 3.10 +Correct1 = str | tuple[float, float, str] +Correct2 = tuple[float] | str +Correct3 = tuple[float, ...] | str +Correct4 = tuple[float, str] +Correct5 = tuple[float, ...] +Correct6 = list[tuple[int, str]] +c1: Correct1 +c2: Correct2 +c3: Correct3 +c4: Correct4 +c5: Correct5 +c6: Correct6 +reveal_type(c1) +reveal_type(c2) +reveal_type(c3) +reveal_type(c4) +reveal_type(c5) +reveal_type(c6) + +RHSAlias1: type = tuple[int, int] +RHSAlias2: type = tuple[int] +RHSAlias3: type = tuple[int, ...] + +WrongTypeElement = str | tuple[float, 1] # Error +WrongEllipsis = tuple[float, float, ...] 
| str # Error + +# TODO: This should produce a fixed-length tuple +reveal_type(tuple[int, str]((1, "x"))) +[out] +_testTupleWithDifferentArgsPy310.py:15: note: Revealed type is "Union[builtins.str, Tuple[builtins.float, builtins.float, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:16: note: Revealed type is "Union[Tuple[builtins.float], builtins.str]" +_testTupleWithDifferentArgsPy310.py:17: note: Revealed type is "Union[builtins.tuple[builtins.float, ...], builtins.str]" +_testTupleWithDifferentArgsPy310.py:18: note: Revealed type is "Tuple[builtins.float, builtins.str]" +_testTupleWithDifferentArgsPy310.py:19: note: Revealed type is "builtins.tuple[builtins.float, ...]" +_testTupleWithDifferentArgsPy310.py:20: note: Revealed type is "builtins.list[Tuple[builtins.int, builtins.str]]" +_testTupleWithDifferentArgsPy310.py:26: error: Invalid type: try using Literal[1] instead? +_testTupleWithDifferentArgsPy310.py:27: error: Unexpected "..." +_testTupleWithDifferentArgsPy310.py:30: note: Revealed type is "builtins.tuple[builtins.object, ...]" + +[case testEnumIterMetaInference] +import socket +from enum import Enum +from typing import Iterable, Iterator, Type, TypeVar + +_E = TypeVar("_E", bound=Enum) + +def enum_iter(cls: Type[_E]) -> Iterable[_E]: + reveal_type(iter(cls)) + reveal_type(next(iter(cls))) + return iter(cls) + +for value in enum_iter(socket.SocketKind): + reveal_type(value) +[out] +_testEnumIterMetaInference.py:8: note: Revealed type is "typing.Iterator[_E`-1]" +_testEnumIterMetaInference.py:9: note: Revealed type is "_E`-1" +_testEnumIterMetaInference.py:13: note: Revealed type is "socket.SocketKind" + +[case testNativeIntTypes] +# Spot check various native int operations with full stubs. +from mypy_extensions import i64, i32 + +x: i64 = 0 +y: int = x +x = i64(0) +y = int(x) +i64() +i64("12") +i64("ab", 16) +i64(1.2) +float(i64(1)) + +i64(1) + i32(2) # Error +reveal_type(x + y) +reveal_type(y + x) +a = [0] +a[x] +[out] +_testNativeIntTypes.py:14: error: Unsupported operand types for + ("i64" and "i32") +_testNativeIntTypes.py:15: note: Revealed type is "mypy_extensions.i64" +_testNativeIntTypes.py:16: note: Revealed type is "mypy_extensions.i64" + +[case testStarUnpackNestedUnderscore] +from typing import Tuple, Dict, List + +def crash() -> None: + d: Dict[int, Tuple[str, int, str]] = {} + k, (v1, *_) = next(iter(d.items())) + +def test1() -> None: + vs: List[str] + d: Dict[int, Tuple[str, int, int]] = {} + k, (v1, *vs) = next(iter(d.items())) + reveal_type(vs) + +def test2() -> None: + d: Dict[int, Tuple[str, int, str]] = {} + k, (v1, *vs) = next(iter(d.items())) + reveal_type(vs) +[out] +_testStarUnpackNestedUnderscore.py:10: error: List item 0 has incompatible type "int"; expected "str" +_testStarUnpackNestedUnderscore.py:10: error: List item 1 has incompatible type "int"; expected "str" +_testStarUnpackNestedUnderscore.py:11: note: Revealed type is "builtins.list[builtins.str]" +_testStarUnpackNestedUnderscore.py:16: note: Revealed type is "builtins.list[builtins.object]" + +[case testStrictEqualitywithParamSpec] +# flags: --strict-equality +from typing import Generic +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") + +class Foo(Generic[P]): ... +class Bar(Generic[P]): ... 
+ +def bad(foo: Foo[[int]], bar: Bar[[int]]) -> bool: + return foo == bar + +def good1(foo1: Foo[[int]], foo2: Foo[[str]]) -> bool: + return foo1 == foo2 + +def good2(foo1: Foo[[int, str]], foo2: Foo[[int, bytes]]) -> bool: + return foo1 == foo2 + +def good3(foo1: Foo[[int]], foo2: Foo[[int, int]]) -> bool: + return foo1 == foo2 + +def good4(foo1: Foo[[int]], foo2: Foo[[int]]) -> bool: + return foo1 == foo2 + +def good5(foo1: Foo[[int]], foo2: Foo[[bool]]) -> bool: + return foo1 == foo2 + +def good6(foo1: Foo[[int, int]], foo2: Foo[[bool, bool]]) -> bool: + return foo1 == foo2 + +def good7(foo1: Foo[[int]], foo2: Foo[P], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +def good8(foo1: Foo[P], foo2: Foo[[int, str, bytes]], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +def good9(foo1: Foo[Concatenate[int, P]], foo2: Foo[[int, str, bytes]], *args: P.args, **kwargs: P.kwargs) -> bool: + return foo1 == foo2 + +[out] +_testStrictEqualitywithParamSpec.py:11: error: Non-overlapping equality check (left operand type: "Foo[[int]]", right operand type: "Bar[[int]]") diff --git a/test-data/unit/reports.test b/test-data/unit/reports.test index a7ab6d754b2c..50dabb1fdea9 100644 --- a/test-data/unit/reports.test +++ b/test-data/unit/reports.test @@ -103,6 +103,28 @@ class A(object): +[case testNoCrashRecursiveAliasInReport] +# cmd: mypy --any-exprs-report report n.py + +[file n.py] +from typing import Union, List, Any, TypeVar + +Nested = List[Union[Any, Nested]] +T = TypeVar("T") +NestedGen = List[Union[T, NestedGen[T]]] + +x: Nested +y: NestedGen[int] +z: NestedGen[Any] + +[file report/any-exprs.txt] +[outfile report/types-of-anys.txt] + Name Unannotated Explicit Unimported Omitted Generics Error Special Form Implementation Artifact +----------------------------------------------------------------------------------------------------------------- + n 0 4 0 8 0 0 0 +----------------------------------------------------------------------------------------------------------------- +Total 0 4 0 8 0 0 0 + [case testTypeVarTreatedAsEmptyLine] # cmd: mypy --html-report report n.py @@ -480,7 +502,7 @@ namespace_packages = True -

folder.subfolder.something

+

folder.subfolder.something

diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test index 4b60ab99f869..20443517e03e 100644 --- a/test-data/unit/semanal-basic.test +++ b/test-data/unit/semanal-basic.test @@ -8,8 +8,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( NameExpr(x [__main__.x]))) @@ -25,8 +26,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(2)) AssignmentStmt:2( - NameExpr(z* [__main__.z]) - IntExpr(3)) + NameExpr(z [__main__.z]) + IntExpr(3) + builtins.int) ExpressionStmt:3( TupleExpr:3( NameExpr(x [__main__.x]) @@ -48,25 +50,27 @@ MypyFile:1( Args()))) [case testAccessingGlobalNameBeforeDefinition] +# flags: --disable-error-code used-before-def x f() x = 1 def f(): pass [out] MypyFile:1( - ExpressionStmt:1( - NameExpr(x [__main__.x])) ExpressionStmt:2( - CallExpr:2( + NameExpr(x [__main__.x])) + ExpressionStmt:3( + CallExpr:3( NameExpr(f [__main__.f]) Args())) - AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) - FuncDef:4( + AssignmentStmt:4( + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) + FuncDef:5( f - Block:4( - PassStmt:4()))) + Block:5( + PassStmt:5()))) [case testFunctionArgs] def f(x, y): @@ -117,8 +121,9 @@ MypyFile:1( NameExpr(g [__main__.g]) Args())))) AssignmentStmt:4( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:5( g Block:5( @@ -134,8 +139,9 @@ def f(y): [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( NameExpr(x [__main__.x]) IntExpr(2)) @@ -163,8 +169,9 @@ x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) FuncDef:2( f Block:2( @@ -498,17 +505,21 @@ MypyFile:1( ExpressionStmt:3( Ellipsis))) AssignmentStmt:4( - NameExpr(x* [__main__.x] = 1) - IntExpr(1)) + NameExpr(x [__main__.x] = 1) + IntExpr(1) + Literal[1]?) AssignmentStmt:5( - NameExpr(y* [__main__.y] = 1.0) - FloatExpr(1.0)) + NameExpr(y [__main__.y] = 1.0) + FloatExpr(1.0) + Literal[1.0]?) AssignmentStmt:6( - NameExpr(s* [__main__.s] = hi) - StrExpr(hi)) + NameExpr(s [__main__.s] = hi) + StrExpr(hi) + Literal['hi']?) AssignmentStmt:7( - NameExpr(t* [__main__.t] = True) - NameExpr(True [builtins.True])) + NameExpr(t [__main__.t] = True) + NameExpr(True [builtins.True]) + Literal[True]?) 
AssignmentStmt:8( NameExpr(n* [__main__.n] = None) CallExpr:8( diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test index 082a3fe69050..86f8b8656fb6 100644 --- a/test-data/unit/semanal-classes.test +++ b/test-data/unit/semanal-classes.test @@ -248,8 +248,9 @@ MypyFile:1( ClassDef:1( A AssignmentStmt:2( - NameExpr(x* [m]) - IntExpr(1)) + NameExpr(x [m]) + IntExpr(1) + builtins.int) AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))) @@ -287,8 +288,9 @@ MypyFile:1( NameExpr(A [__main__.A])) Then( AssignmentStmt:3( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) Else( AssignmentStmt:5( NameExpr(x [__main__.A.x]) @@ -541,8 +543,9 @@ MypyFile:1( ClassDef:2( A AssignmentStmt:3( - NameExpr(X* [m]) - IntExpr(1)) + NameExpr(X [m]) + IntExpr(1) + builtins.int) FuncDef:4( f Args( diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index 943420fa98a1..a4ed905dcb9f 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -316,15 +316,16 @@ x = y tmp/k.py:2: error: Name "y" is not defined [case testPackageWithoutInitFile] +# flags: --no-namespace-packages import typing import m.n m.n.x [file m/n.py] x = 1 [out] -main:2: error: Cannot find implementation or library stub for module named "m.n" -main:2: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports -main:2: error: Cannot find implementation or library stub for module named "m" +main:3: error: Cannot find implementation or library stub for module named "m.n" +main:3: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports +main:3: error: Cannot find implementation or library stub for module named "m" [case testBreakOutsideLoop] break @@ -541,7 +542,7 @@ import typing class A: def f(): pass [out] -main:3: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? [case testInvalidBaseClass] import typing @@ -563,8 +564,8 @@ class A: def f() -> None: pass def g(): pass [out] -main:3: error: Method must have at least one argument -main:4: error: Method must have at least one argument +main:3: error: Method must have at least one argument. Did you forget the "self" argument? +main:4: error: Method must have at least one argument. Did you forget the "self" argument? [case testMultipleMethodDefinition] import typing @@ -1456,9 +1457,11 @@ bad: Tuple[Unpack[int]] # E: builtins.int cannot be unpacked (must be tuple or [builtins fixtures/tuple.pyi] [case testTypeVarTuple] +from typing import Generic from typing_extensions import TypeVarTuple, Unpack TVariadic = TypeVarTuple('TVariadic') +TVariadic2 = TypeVarTuple('TVariadic2') TP = TypeVarTuple('?') # E: String argument 1 "?" to TypeVarTuple(...) 
does not match variable name "TP" TP2: int = TypeVarTuple('TP2') # E: Cannot declare the type of a TypeVar or similar construct TP3 = TypeVarTuple() # E: Too few arguments for TypeVarTuple() @@ -1467,3 +1470,16 @@ TP5 = TypeVarTuple(t='TP5') # E: TypeVarTuple() expects a string literal as fir x: TVariadic # E: TypeVarTuple "TVariadic" is unbound y: Unpack[TVariadic] # E: TypeVarTuple "TVariadic" is unbound + + +class Variadic(Generic[Unpack[TVariadic], Unpack[TVariadic2]]): # E: Can only use one type var tuple in a class def + pass + +# TODO: this should generate an error +#def bad_args(*args: TVariadic): +# pass + +def bad_kwargs(**kwargs: Unpack[TVariadic]): # E: Unpack item in ** argument must be a TypedDict + pass + +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test index 98bf32708f1b..fa07e533a842 100644 --- a/test-data/unit/semanal-expressions.test +++ b/test-data/unit/semanal-expressions.test @@ -15,8 +15,9 @@ x.y [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( MemberExpr:2( NameExpr(x [__main__.x]) @@ -80,8 +81,9 @@ not x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) ExpressionStmt:2( UnaryExpr:2( - @@ -187,8 +189,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( @@ -223,8 +226,9 @@ b = [x for x in a if x] [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) ListComprehension:2( @@ -240,8 +244,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( SetComprehension:2( GeneratorExpr:2( @@ -258,8 +263,9 @@ b = {x for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) SetComprehension:2( @@ -275,8 +281,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( DictionaryComprehension:2( NameExpr(x [l]) @@ -293,8 +300,9 @@ b = {x: x + 1 for x in a if x} [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) AssignmentStmt:2( NameExpr(b* [__main__.b]) DictionaryComprehension:2( @@ -313,8 +321,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -327,8 +336,9 @@ a = 0 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(a* [__main__.a]) - IntExpr(0)) + NameExpr(a [__main__.a]) + IntExpr(0) + builtins.int) ExpressionStmt:2( GeneratorExpr:2( NameExpr(x [l]) @@ -345,8 +355,9 @@ lambda: x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:2( LambdaExpr:2( Block:2( diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test index 16b9a9b18250..bc381293161f 100644 --- 
a/test-data/unit/semanal-modules.test +++ b/test-data/unit/semanal-modules.test @@ -16,8 +16,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testImportedNameInType] import m @@ -51,8 +52,9 @@ MypyFile:1( MypyFile:1( tmp/m.py AssignmentStmt:1( - NameExpr(y* [m.y]) - IntExpr(1))) + NameExpr(y [m.y]) + IntExpr(1) + builtins.int)) [case testImportFromType] from m import c @@ -342,8 +344,9 @@ MypyFile:1( MypyFile:1( tmp/m/n.py AssignmentStmt:1( - NameExpr(x* [m.n.x]) - IntExpr(1))) + NameExpr(x [m.n.x]) + IntExpr(1) + builtins.int)) [case testImportFromSubmodule] from m._n import x @@ -448,8 +451,9 @@ MypyFile:1( MypyFile:1( tmp/m/n/k.py AssignmentStmt:1( - NameExpr(x* [m.n.k.x]) - IntExpr(1))) + NameExpr(x [m.n.k.x]) + IntExpr(1) + builtins.int)) [case testImportInSubmodule] import m._n @@ -609,8 +613,9 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(y* [x.y]) - IntExpr(1))) + NameExpr(y [x.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport0] import m.x @@ -637,8 +642,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(1))) + NameExpr(y [m.z.y]) + IntExpr(1) + builtins.int)) [case testRelativeImport1] import m.t.b as b @@ -673,13 +679,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport2] import m.t.b as b @@ -712,13 +720,15 @@ MypyFile:1( MypyFile:1( tmp/m/x.py AssignmentStmt:1( - NameExpr(y* [m.x.y]) - IntExpr(1))) + NameExpr(y [m.x.y]) + IntExpr(1) + builtins.int)) MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(y* [m.z.y]) - IntExpr(3))) + NameExpr(y [m.z.y]) + IntExpr(3) + builtins.int)) [case testRelativeImport3] import m.t @@ -762,8 +772,9 @@ MypyFile:1( MypyFile:1( tmp/m/z.py AssignmentStmt:1( - NameExpr(zy* [m.z.zy]) - IntExpr(3))) + NameExpr(zy [m.z.zy]) + IntExpr(3) + builtins.int)) [case testRelativeImportFromSameModule] import m.x @@ -803,7 +814,7 @@ def somef_unction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "somef_unction" or "some_function"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function" or "somef_unction"? [case testImportMisspellingMultipleCandidatesTruncated] import f @@ -820,7 +831,7 @@ def somefun_ction(): [file f.py] from m.x import somefunction [out] -tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "somefun_ction", "somefu_nction", or "somef_unction"? +tmp/f.py:1: error: Module "m.x" has no attribute "somefunction"; maybe "some_function", "somef_unction", or "somefu_nction"? 
[case testFromImportAsInStub] from m import * @@ -914,5 +925,6 @@ MypyFile:1( MypyFile:1( tmp/x.py AssignmentStmt:1( - NameExpr(a* [x.a]) - IntExpr(1))) + NameExpr(a [x.a]) + IntExpr(1) + builtins.int)) diff --git a/test-data/unit/semanal-python310.test b/test-data/unit/semanal-python310.test index a009636575dc..9418ac2912b2 100644 --- a/test-data/unit/semanal-python310.test +++ b/test-data/unit/semanal-python310.test @@ -8,8 +8,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -28,8 +29,9 @@ a [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -49,8 +51,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -78,8 +81,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -99,8 +103,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -121,11 +126,13 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( - NameExpr(a* [__main__.a]) - IntExpr(1)) + NameExpr(a [__main__.a]) + IntExpr(1) + builtins.int) MatchStmt:3( NameExpr(x [__main__.x]) Pattern( @@ -144,8 +151,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -164,8 +172,9 @@ match x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:2( NameExpr(x [__main__.x]) Pattern( @@ -191,8 +200,9 @@ b = 1 MypyFile:1( Import:1(_a) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) MatchStmt:4( NameExpr(x [__main__.x]) Pattern( diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test index fdc5ca2bbbdd..013452068cf1 100644 --- a/test-data/unit/semanal-statements.test +++ b/test-data/unit/semanal-statements.test @@ -272,8 +272,9 @@ else: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -326,8 +327,9 @@ MypyFile:1( NameExpr(y* [__main__.y])) IntExpr(1)) AssignmentStmt:2( - NameExpr(xx* [__main__.xx]) - IntExpr(1)) + NameExpr(xx [__main__.xx]) + IntExpr(1) + builtins.int) AssignmentStmt:3( MemberExpr:3( NameExpr(x [__main__.x]) @@ -408,8 +410,9 @@ MypyFile:1( [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y])) @@ -436,8 +439,9 @@ y, x = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -450,8 +454,9 @@ y, 
(x, z) = 1 [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) AssignmentStmt:2( TupleExpr:2( NameExpr(y* [__main__.y]) @@ -468,8 +473,9 @@ if x: [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) IfStmt:2( If( NameExpr(x [__main__.x])) @@ -510,8 +516,9 @@ del x [out] MypyFile:1( AssignmentStmt:1( - NameExpr(x* [__main__.x]) - IntExpr(1)) + NameExpr(x [__main__.x]) + IntExpr(1) + builtins.int) DelStmt:2( NameExpr(x [__main__.x]))) @@ -782,6 +789,7 @@ MypyFile:1( Args()))))) [case testTryExceptWithMultipleHandlers] +class Err(BaseException): pass try: pass except BaseException as e: @@ -789,36 +797,34 @@ except BaseException as e: except Err as f: f = BaseException() # Fail f = Err() -class Err(BaseException): pass [builtins fixtures/exception.pyi] [out] MypyFile:1( - TryStmt:1( - Block:1( - PassStmt:2()) + ClassDef:1( + Err + BaseType( + builtins.BaseException) + PassStmt:1()) + TryStmt:2( + Block:2( + PassStmt:3()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) - Block:3( - PassStmt:4()) + Block:4( + PassStmt:5()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) - Block:5( - AssignmentStmt:6( + Block:6( + AssignmentStmt:7( NameExpr(f [__main__.f]) - CallExpr:6( + CallExpr:7( NameExpr(BaseException [builtins.BaseException]) Args())) - AssignmentStmt:7( + AssignmentStmt:8( NameExpr(f [__main__.f]) - CallExpr:7( + CallExpr:8( NameExpr(Err [__main__.Err]) - Args())))) - ClassDef:8( - Err - BaseType( - builtins.BaseException) - PassStmt:8())) - + Args()))))) [case testMultipleAssignmentWithPartialNewDef] # flags: --allow-redefinition o = None @@ -961,16 +967,18 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - NameExpr(x'* [__main__.x']) - IntExpr(0)) + NameExpr(x' [__main__.x']) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) Args( NameExpr(x' [__main__.x'])))) AssignmentStmt:5( - NameExpr(x* [__main__.x]) - StrExpr()) + NameExpr(x [__main__.x]) + StrExpr() + builtins.str) ExpressionStmt:6( CallExpr:6( NameExpr(f [__main__.f]) @@ -993,8 +1001,9 @@ MypyFile:1( Block:2( PassStmt:2())) AssignmentStmt:3( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:4( CallExpr:4( NameExpr(f [__main__.f]) @@ -1046,15 +1055,17 @@ x = '' [out] MypyFile:1( AssignmentStmt:2( - NameExpr(x* [__main__.x]) - IntExpr(0)) + NameExpr(x [__main__.x]) + IntExpr(0) + builtins.int) ExpressionStmt:3( NameExpr(x [__main__.x])) ClassDef:4( A AssignmentStmt:5( - NameExpr(x* [m]) - IntExpr(1))) + NameExpr(x [m]) + IntExpr(1) + builtins.int)) AssignmentStmt:6( NameExpr(x [__main__.x]) StrExpr())) @@ -1114,3 +1125,191 @@ MypyFile:1( AssignmentStmt:5( NameExpr(y [__main__.y]) IntExpr(1))))) + +[case testConstantFold1] +from typing_extensions import Final +add: Final = 15 + 47 +add_mul: Final = (2 + 3) * 5 +sub: Final = 7 - 11 +bit_and: Final = 6 & 10 +bit_or: Final = 6 | 10 +bit_xor: Final = 6 ^ 10 +lshift: Final = 5 << 2 +rshift: Final = 13 >> 2 +lshift0: Final = 5 << 0 +rshift0: Final = 13 >> 0 +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(add [__main__.add] = 62) + OpExpr:2( + + + IntExpr(15) + IntExpr(47)) + Literal[62]?) + AssignmentStmt:3( + NameExpr(add_mul [__main__.add_mul] = 25) + OpExpr:3( + * + OpExpr:3( + + + IntExpr(2) + IntExpr(3)) + IntExpr(5)) + Literal[25]?) 
+ AssignmentStmt:4( + NameExpr(sub [__main__.sub] = -4) + OpExpr:4( + - + IntExpr(7) + IntExpr(11)) + Literal[-4]?) + AssignmentStmt:5( + NameExpr(bit_and [__main__.bit_and] = 2) + OpExpr:5( + & + IntExpr(6) + IntExpr(10)) + Literal[2]?) + AssignmentStmt:6( + NameExpr(bit_or [__main__.bit_or] = 14) + OpExpr:6( + | + IntExpr(6) + IntExpr(10)) + Literal[14]?) + AssignmentStmt:7( + NameExpr(bit_xor [__main__.bit_xor] = 12) + OpExpr:7( + ^ + IntExpr(6) + IntExpr(10)) + Literal[12]?) + AssignmentStmt:8( + NameExpr(lshift [__main__.lshift] = 20) + OpExpr:8( + << + IntExpr(5) + IntExpr(2)) + Literal[20]?) + AssignmentStmt:9( + NameExpr(rshift [__main__.rshift] = 3) + OpExpr:9( + >> + IntExpr(13) + IntExpr(2)) + Literal[3]?) + AssignmentStmt:10( + NameExpr(lshift0 [__main__.lshift0] = 5) + OpExpr:10( + << + IntExpr(5) + IntExpr(0)) + Literal[5]?) + AssignmentStmt:11( + NameExpr(rshift0 [__main__.rshift0] = 13) + OpExpr:11( + >> + IntExpr(13) + IntExpr(0)) + Literal[13]?)) + +[case testConstantFold2] +from typing_extensions import Final +neg1: Final = -5 +neg2: Final = --1 +neg3: Final = -0 +pos: Final = +5 +inverted1: Final = ~0 +inverted2: Final = ~5 +inverted3: Final = ~3 +p0: Final = 3**0 +p1: Final = 3**5 +p2: Final = (-5)**3 +p3: Final = 0**0 +s: Final = 'x' + 'y' +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [Final]) + AssignmentStmt:2( + NameExpr(neg1 [__main__.neg1] = -5) + UnaryExpr:2( + - + IntExpr(5)) + Literal[-5]?) + AssignmentStmt:3( + NameExpr(neg2 [__main__.neg2] = 1) + UnaryExpr:3( + - + UnaryExpr:3( + - + IntExpr(1))) + Literal[1]?) + AssignmentStmt:4( + NameExpr(neg3 [__main__.neg3] = 0) + UnaryExpr:4( + - + IntExpr(0)) + Literal[0]?) + AssignmentStmt:5( + NameExpr(pos [__main__.pos] = 5) + UnaryExpr:5( + + + IntExpr(5)) + Literal[5]?) + AssignmentStmt:6( + NameExpr(inverted1 [__main__.inverted1] = -1) + UnaryExpr:6( + ~ + IntExpr(0)) + Literal[-1]?) + AssignmentStmt:7( + NameExpr(inverted2 [__main__.inverted2] = -6) + UnaryExpr:7( + ~ + IntExpr(5)) + Literal[-6]?) + AssignmentStmt:8( + NameExpr(inverted3 [__main__.inverted3] = -4) + UnaryExpr:8( + ~ + IntExpr(3)) + Literal[-4]?) + AssignmentStmt:9( + NameExpr(p0 [__main__.p0] = 1) + OpExpr:9( + ** + IntExpr(3) + IntExpr(0)) + Literal[1]?) + AssignmentStmt:10( + NameExpr(p1 [__main__.p1] = 243) + OpExpr:10( + ** + IntExpr(3) + IntExpr(5)) + Literal[243]?) + AssignmentStmt:11( + NameExpr(p2 [__main__.p2] = -125) + OpExpr:11( + ** + UnaryExpr:11( + - + IntExpr(5)) + IntExpr(3)) + Literal[-125]?) + AssignmentStmt:12( + NameExpr(p3 [__main__.p3] = 1) + OpExpr:12( + ** + IntExpr(0) + IntExpr(0)) + Literal[1]?) 
+ AssignmentStmt:13( + NameExpr(s [__main__.s] = xy) + OpExpr:13( + + + StrExpr(x) + StrExpr(y)) + Literal['xy']?)) diff --git a/test-data/unit/semanal-symtable.test b/test-data/unit/semanal-symtable.test index bdf4f52ae5fc..c886080557b0 100644 --- a/test-data/unit/semanal-symtable.test +++ b/test-data/unit/semanal-symtable.test @@ -9,7 +9,7 @@ x = 1 [out] __main__: SymbolTable( - x : Gdef/Var (__main__.x)) + x : Gdef/Var (__main__.x) : builtins.int) [case testFuncDef] def f(): pass @@ -35,7 +35,7 @@ __main__: m : Gdef/MypyFile (m)) m: SymbolTable( - x : Gdef/Var (m.x)) + x : Gdef/Var (m.x) : builtins.int) [case testImportFromModule] from m import x @@ -49,7 +49,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testImportAs] from m import x as xx @@ -63,7 +63,7 @@ __main__: m: SymbolTable( x : Gdef/TypeInfo (m.x) - y : Gdef/Var (m.y)) + y : Gdef/Var (m.y) : builtins.int) [case testFailingImports] from sys import non_existing1 # type: ignore @@ -80,7 +80,7 @@ __main__: non_existing4 : Gdef/Var (__main__.non_existing4) : Any) sys: SymbolTable( - platform : Gdef/Var (sys.platform) + platform : Gdef/Var (sys.platform) : builtins.str version_info : Gdef/Var (sys.version_info)) [case testDecorator] @@ -95,6 +95,6 @@ def g() -> None: [out] __main__: SymbolTable( - Callable : Gdef/Var (typing.Callable) + Callable : Gdef/Var (typing.Callable) : builtins.int dec : Gdef/FuncDef (__main__.dec) : def (f: def ()) -> def () g : Gdef/Decorator (__main__.g) : def ()) diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index d832772f5f81..494d701b758a 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -790,6 +790,7 @@ def f(x: int) -> None: pass def f(*args) -> None: pass x = f +[builtins fixtures/tuple.pyi] [out] MypyFile:1( ImportFrom:1(typing, [overload]) @@ -1032,6 +1033,7 @@ MypyFile:1( [case testVarArgsAndKeywordArgs] def g(*x: int, y: str = ''): pass +[builtins fixtures/tuple.pyi] [out] MypyFile:1( FuncDef:1( @@ -1558,3 +1560,29 @@ MypyFile:1( AssignmentStmt:2( NameExpr(TV* [__main__.TV]) TypeVarTupleExpr:2())) + +[builtins fixtures/tuple.pyi] + +[case testTypeVarTupleCallable] +from typing_extensions import TypeVarTuple, Unpack +from typing import Callable +Ts = TypeVarTuple("Ts") + +def foo(x: Callable[[Unpack[Ts]], None]) -> None: + pass +[out] +MypyFile:1( + ImportFrom:1(typing_extensions, [TypeVarTuple, Unpack]) + ImportFrom:2(typing, [Callable]) + AssignmentStmt:3( + NameExpr(Ts* [__main__.Ts]) + TypeVarTupleExpr:3()) + FuncDef:5( + foo + Args( + Var(x)) + def [Ts] (x: def (*Unpack[Ts`-1])) + Block:5( + PassStmt:6()))) + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 408f116443d2..8e4285b7de2e 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -947,16 +947,6 @@ from typing import Any alias = Container[Any] -[case testAliasOnlyToplevel] -class Foo: - alias = str - -[out] -from _typeshed import Incomplete - -class Foo: - alias: Incomplete - [case testAliasExceptions] noalias1 = None noalias2 = ... 
@@ -969,6 +959,56 @@ noalias1: Incomplete noalias2: Incomplete noalias3: bool +[case testComplexAlias] +# modules: main a + +from a import valid + +def func() -> int: + return 2 + +aliased_func = func +int_value = 1 + +class A: + cls_var = valid + + def __init__(self, arg: str) -> None: + self.self_var = arg + + def meth(self) -> None: + func_value = int_value + + alias_meth = meth + alias_func = func + alias_alias_func = aliased_func + int_value = int_value + +[file a.py] +valid : list[int] = [1, 2, 3] + + +[out] +# main.pyi +from _typeshed import Incomplete +from a import valid + +def func() -> int: ... +aliased_func = func +int_value: int + +class A: + cls_var = valid + self_var: Incomplete + def __init__(self, arg: str) -> None: ... + def meth(self) -> None: ... + alias_meth = meth + alias_func = func + alias_alias_func = aliased_func + int_value = int_value +# a.pyi +valid: list[int] + -- More features/fixes: -- do not export deleted names @@ -2705,3 +2745,51 @@ def f(): return 0 [out] def f(): ... + +[case testKnownMagicMethodsReturnTypes] +class Some: + def __len__(self): ... + def __length_hint__(self): ... + def __init__(self): ... + def __del__(self): ... + def __bool__(self): ... + def __bytes__(self): ... + def __format__(self, spec): ... + def __contains__(self, obj): ... + def __complex__(self): ... + def __int__(self): ... + def __float__(self): ... + def __index__(self): ... +[out] +class Some: + def __len__(self) -> int: ... + def __length_hint__(self) -> int: ... + def __init__(self) -> None: ... + def __del__(self) -> None: ... + def __bool__(self) -> bool: ... + def __bytes__(self) -> bytes: ... + def __format__(self, spec) -> str: ... + def __contains__(self, obj) -> bool: ... + def __complex__(self) -> complex: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __index__(self) -> int: ... + + +[case testTypeVarPEP604Bound] +from typing import TypeVar +T = TypeVar("T", bound=str | None) +[out] +from typing import TypeVar + +T = TypeVar('T', bound=str | None) + + +[case testPEP604UnionType] +a: str | int + +def f(x: str | None) -> None: ... +[out] +a: str | int + +def f(x: str | None) -> None: ... 
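
The new stubgen cases above (testComplexAlias and testKnownMagicMethodsReturnTypes) exercise attribute inference and the hard-coded return types for well-known dunder methods. A condensed before/after sketch of that behaviour, with an illustrative module name and the expected output shown only in comments (the exact layout may differ slightly from real stubgen output):

    # box.py -- input module with unannotated dunders (illustrative)
    class Box:
        def __init__(self, items):
            self.items = items
        def __len__(self):
            return len(self.items)
        def __bool__(self):
            return bool(self.items)

    # Roughly the expected generated stub (box.pyi):
    #
    # from _typeshed import Incomplete
    #
    # class Box:
    #     items: Incomplete
    #     def __init__(self, items) -> None: ...
    #     def __len__(self) -> int: ...
    #     def __bool__(self) -> bool: ...
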
diff --git a/test-data/unit/typexport-basic.test b/test-data/unit/typexport-basic.test index 5cbdf38d1b4f..cd4071eb14ee 100644 --- a/test-data/unit/typexport-basic.test +++ b/test-data/unit/typexport-basic.test @@ -21,15 +21,15 @@ [case testConstructorCall] import typing -A() -B() class A: pass class B: pass +A() +B() [out] -CallExpr(2) : A -NameExpr(2) : def () -> A -CallExpr(3) : B -NameExpr(3) : def () -> B +CallExpr(4) : A +NameExpr(4) : def () -> A +CallExpr(5) : B +NameExpr(5) : def () -> B [case testLiterals] import typing @@ -139,6 +139,8 @@ class float: def __sub__(self, x: int) -> float: pass class type: pass class str: pass +class list: pass +class dict: pass [out] OpExpr(3) : builtins.int OpExpr(4) : builtins.float @@ -165,6 +167,8 @@ class bool: pass class type: pass class function: pass class str: pass +class list: pass +class dict: pass [out] ComparisonExpr(3) : builtins.bool ComparisonExpr(4) : builtins.bool @@ -202,17 +206,17 @@ UnaryExpr(6) : builtins.bool [case testFunctionCall] ## CallExpr from typing import Tuple -f( - A(), - B()) class A: pass class B: pass def f(a: A, b: B) -> Tuple[A, B]: pass +f( + A(), + B()) [builtins fixtures/tuple-simple.pyi] [out] -CallExpr(3) : Tuple[A, B] -CallExpr(4) : A -CallExpr(5) : B +CallExpr(6) : Tuple[A, B] +CallExpr(7) : A +CallExpr(8) : B -- Statements @@ -602,28 +606,26 @@ NameExpr(4) : def [t] (x: t`-1) -> t`-1 ## CallExpr from typing import TypeVar, Generic T = TypeVar('T') -f(g()) -f(h(b)) -f(h(c)) - -b = None # type: B -c = None # type: C - +class A(Generic[T]): pass +class B: pass +class C(B): pass def f(a: 'A[B]') -> None: pass - def g() -> 'A[T]': pass def h(a: T) -> 'A[T]': pass -class A(Generic[T]): pass -class B: pass -class C(B): pass +b = None # type: B +c = None # type: C + +f(g()) +f(h(b)) +f(h(c)) [out] -CallExpr(4) : None -CallExpr(4) : A[B] -CallExpr(5) : None -CallExpr(5) : A[B] -CallExpr(6) : None -CallExpr(6) : A[B] +CallExpr(14) : None +CallExpr(14) : A[B] +CallExpr(15) : None +CallExpr(15) : A[B] +CallExpr(16) : None +CallExpr(16) : A[B] [case testInferGenericTypeForLocalVariable] from typing import TypeVar, Generic @@ -697,21 +699,21 @@ ListExpr(2) : builtins.list[Any] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -map( - f, - [A()]) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +map( + f, + [A()]) [builtins fixtures/list.pyi] [out] -CallExpr(4) : builtins.list[B] -NameExpr(4) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(5) : def (a: A) -> B -CallExpr(6) : A -ListExpr(6) : builtins.list[A] -NameExpr(6) : def () -> A +CallExpr(8) : builtins.list[B] +NameExpr(8) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +NameExpr(9) : def (a: A) -> B +CallExpr(10) : A +ListExpr(10) : builtins.list[A] +NameExpr(10) : def () -> A -- Lambdas @@ -761,106 +763,106 @@ ListExpr(2) : builtins.list[A] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: f(x), l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: f(x), l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -CallExpr(6) : B -LambdaExpr(6) : def (A) -> B -NameExpr(6) : def (a: A) -> B -NameExpr(6) : builtins.list[A] -NameExpr(6) : A +CallExpr(9) : builtins.list[B] 
+NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +CallExpr(10) : B +LambdaExpr(10) : def (A) -> B +NameExpr(10) : def (a: A) -> B +NameExpr(10) : builtins.list[A] +NameExpr(10) : A [case testLambdaAndHigherOrderFunction2] ## LambdaExpr|NameExpr|ListExpr from typing import TypeVar, List, Callable t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: [f(x)], l) def map(f: Callable[[t], List[s]], a: List[t]) -> List[s]: pass class A: pass class B: pass def f(a: A) -> B: pass +l = None # type: List[A] +map( + lambda x: [f(x)], l) [builtins fixtures/list.pyi] [out] -NameExpr(6) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(7) : def (A) -> builtins.list[B] -ListExpr(7) : builtins.list[B] -NameExpr(7) : def (a: A) -> B -NameExpr(7) : builtins.list[A] -NameExpr(7) : A +NameExpr(10) : def (f: def (A) -> builtins.list[B], a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(11) : def (A) -> builtins.list[B] +ListExpr(11) : builtins.list[B] +NameExpr(11) : def (a: A) -> B +NameExpr(11) : builtins.list[A] +NameExpr(11) : A [case testLambdaInListAndHigherOrderFunction] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass +class A: pass l = None # type: List[A] map( [lambda x: x], l) -def map(f: List[Callable[[t], s]], a: List[t]) -> List[s]: pass -class A: pass [builtins fixtures/list.pyi] [out] -- TODO We probably should not silently infer 'Any' types in statically typed -- context. Perhaps just fail instead? -CallExpr(5) : builtins.list[Any] -NameExpr(5) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] -LambdaExpr(6) : def (A) -> A -ListExpr(6) : builtins.list[def (A) -> Any] -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(7) : builtins.list[Any] +NameExpr(7) : def (f: builtins.list[def (A) -> Any], a: builtins.list[A]) -> builtins.list[Any] +LambdaExpr(8) : def (A) -> A +ListExpr(8) : builtins.list[def (A) -> Any] +NameExpr(8) : A +NameExpr(9) : builtins.list[A] [case testLambdaAndHigherOrderFunction3] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') -l = None # type: List[A] -map( - lambda x: x.b, - l) def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass class A: b = None # type: B class B: pass +l = None # type: List[A] +map( + lambda x: x.b, + l) [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -LambdaExpr(6) : def (A) -> B -MemberExpr(6) : B -NameExpr(6) : A -NameExpr(7) : builtins.list[A] +CallExpr(9) : builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +LambdaExpr(10) : def (A) -> B +MemberExpr(10) : B +NameExpr(10) : A +NameExpr(11) : builtins.list[A] [case testLambdaAndHigherOrderFunctionAndKeywordArgs] from typing import TypeVar, Callable, List t = TypeVar('t') s = TypeVar('s') +def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass +class A: + b = None # type: B +class B: pass l = None # type: List[A] map( a=l, f=lambda x: x.b) -def map(f: Callable[[t], s], a: List[t]) -> List[s]: pass -class A: - b = None # type: B -class B: pass [builtins fixtures/list.pyi] [out] -CallExpr(5) : builtins.list[B] -NameExpr(5) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] -NameExpr(6) : builtins.list[A] -LambdaExpr(7) : def (A) -> B -MemberExpr(7) : B -NameExpr(7) : A +CallExpr(9) : 
builtins.list[B] +NameExpr(9) : def (f: def (A) -> B, a: builtins.list[A]) -> builtins.list[B] +NameExpr(10) : builtins.list[A] +LambdaExpr(11) : def (A) -> B +MemberExpr(11) : B +NameExpr(11) : A -- Boolean operations diff --git a/test-requirements.txt b/test-requirements.txt index d5bc3f1113a9..aec11e87e96f 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,14 +1,13 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==22.6.0 # must match version in .pre-commit-config.yaml -filelock>=3.3.0,<3.4.2; python_version<'3.7' -filelock>=3.3.0; python_version>='3.7' -flake8==3.9.2 # must match version in .pre-commit-config.yaml -flake8-bugbear==22.7.1 # must match version in .pre-commit-config.yaml -flake8-noqa==1.2.8 # must match version in .pre-commit-config.yaml -isort[colors]==5.10.1 # must match version in .pre-commit-config.yaml -lxml>=4.4.0; python_version<'3.11' +black==22.12.0 # must match version in .pre-commit-config.yaml +filelock>=3.3.0 +flake8==5.0.4 # must match version in .pre-commit-config.yaml +flake8-bugbear==22.12.6 # must match version in .pre-commit-config.yaml +flake8-noqa==1.3.0 # must match version in .pre-commit-config.yaml +isort[colors]==5.11.4 # must match version in .pre-commit-config.yaml +lxml>=4.9.1; (python_version<'3.11' or sys_platform!='win32') and python_version<'3.12' psutil>=4.0 # pytest 6.2.3 does not support Python 3.10 pytest>=6.2.4 @@ -16,7 +15,6 @@ pytest-xdist>=1.34.0 pytest-forked>=1.3.0,<2.0.0 pytest-cov>=2.10.0 py>=1.5.2 -typed_ast>=1.5.4,<2; python_version>='3.8' -setuptools!=50 +setuptools>=65.5.1 six -importlib-metadata>=4.6.1,<5.0.0 +tomli>=1.1.0 diff --git a/tox.ini b/tox.ini index d2284813195e..443f05dc8bcf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,8 +1,7 @@ [tox] -minversion = 3.8.0 +minversion = 4.4.4 skip_missing_interpreters = {env:TOX_SKIP_MISSING_INTERPRETERS:True} envlist = - py36, py37, py38, py39, @@ -14,36 +13,13 @@ isolated_build = true [testenv] description = run the test driver with {basepython} -setenv = cov: COVERAGE_FILE={toxworkdir}/.coverage.{envname} -passenv = PYTEST_XDIST_WORKER_COUNT PROGRAMDATA PROGRAMFILES(X86) -deps = -rtest-requirements.txt -commands = python -m pytest --durations 100 {posargs} - cov: python -m pytest --durations 100 {posargs: --cov mypy --cov-config setup.cfg} - - -[testenv:coverage] -description = [run locally after tests]: combine coverage data and create report -deps = - coverage >= 4.5.1, < 5 - diff_cover >= 1.0.5, <2 -skip_install = True passenv = - {[testenv]passenv} - DIFF_AGAINST -setenv = COVERAGE_FILE={toxworkdir}/.coverage -commands = - coverage combine --rcfile setup.cfg - coverage report -m --rcfile setup.cfg - coverage xml -o {toxworkdir}/coverage.xml --rcfile setup.cfg - coverage html -d {toxworkdir}/htmlcov --rcfile setup.cfg - diff-cover --compare-branch {env:DIFF_AGAINST:origin/master} {toxworkdir}/coverage.xml -depends = - py36, - py37, - py38, - py39, - py310, -parallel_show_output = True + PYTEST_XDIST_WORKER_COUNT + PROGRAMDATA + PROGRAMFILES(X86) + PYTEST_ADDOPTS +deps = -rtest-requirements.txt +commands = python -m pytest {posargs} [testenv:lint] description = check the code style @@ -54,9 +30,13 @@ commands = [testenv:type] description = type check ourselves +passenv = + TERM + MYPY_FORCE_COLOR + MYPY_FORCE_TERMINAL_WIDTH commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc - python -m mypy --config-file mypy_self_check.ini misc/proper_plugin.py + python -m mypy --config-file mypy_self_check.ini misc --exclude 
misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py

[testenv:docs]
description = invoke sphinx-build to build the HTML docs