From 3932c2fdc46423d9c3774ff129b6a838299a7098 Mon Sep 17 00:00:00 2001 From: Jib Date: Wed, 1 Nov 2023 16:51:22 -0400 Subject: [PATCH 001/221] BUMP 4.7.0.dev0 --- pymongo/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymongo/_version.py b/pymongo/_version.py index c1b30dc88f..62a22ecfef 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -17,7 +17,7 @@ from typing import Tuple, Union -version_tuple: Tuple[Union[int, str], ...] = (4, 6, 0) +version_tuple: Tuple[Union[int, str], ...] = (4, 7, 0, ".dev0") def get_version_string() -> str: From 57ca5b19d023c5e786b5595fc441668722c1f7db Mon Sep 17 00:00:00 2001 From: Jib Date: Wed, 1 Nov 2023 17:03:24 -0400 Subject: [PATCH 002/221] updated the RELEASE.rst to have more recent links --- RELEASE.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/RELEASE.rst b/RELEASE.rst index 55e39baf5a..cabddef84c 100644 --- a/RELEASE.rst +++ b/RELEASE.rst @@ -77,11 +77,11 @@ Doing a Release 12. Publish the release version in Jira. 13. Announce the release on: - https://www.mongodb.com/community/forums/c/announcements/driver-releases/110 + https://www.mongodb.com/community/forums/tags/c/announcements/driver-releases/110/python 14. File a ticket for DOCSP highlighting changes in server version and Python version compatibility or the lack thereof, for example: - https://jira.mongodb.org/browse/DOCSP-13536 + https://jira.mongodb.org/browse/DOCSP-34040 15. Create a GitHub Release for the tag using https://github.com/mongodb/mongo-python-driver/releases/new. From 7936708d97b6bb77a0d6fe6c378f81594c34a7c5 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 1 Nov 2023 19:37:22 -0500 Subject: [PATCH 003/221] PYTHON-4014 Apply QA Suggestions from Scientific Python Library Development Guide (#1414) --- .github/workflows/test-python.yml | 1 + .pre-commit-config.yaml | 21 +++++----- MANIFEST.in | 1 - mypy.ini | 40 ------------------ mypy_test.ini | 9 ++++ pymongo/aggregation.py | 1 - pymongo/auth.py | 4 +- pymongo/auth_aws.py | 13 ++++-- pymongo/client_options.py | 5 +-- pymongo/collation.py | 11 +++-- pymongo/collection.py | 7 ++-- pymongo/compression_support.py | 2 +- pymongo/cursor.py | 2 +- pymongo/encryption.py | 14 +++---- pymongo/encryption_options.py | 2 +- pymongo/mongo_client.py | 2 +- pymongo/monitor.py | 1 + pymongo/operations.py | 11 ++++- pymongo/pool.py | 2 +- pymongo/pyopenssl_context.py | 4 +- pymongo/ssl_support.py | 2 +- pymongo/topology_description.py | 1 - pymongo/write_concern.py | 2 +- pyproject.toml | 41 ++++++++++++++++++- pytest.ini | 6 --- test/__init__.py | 2 +- test/auth_oidc/test_auth_oidc.py | 3 -- .../mockupdb/test_mongos_command_read_mode.py | 1 - test/qcheck.py | 1 - test/test_auth.py | 1 - test/test_binary.py | 4 +- test/test_bulk.py | 6 +-- test/test_change_stream.py | 8 +++- test/test_collection.py | 4 +- test/test_cursor.py | 13 ++++-- test/test_custom_types.py | 1 - test/test_database.py | 6 +-- test/test_encryption.py | 1 - test/test_json_util.py | 6 +-- test/test_raw_bson.py | 1 - test/test_read_preferences.py | 6 +-- test/test_sdam_monitoring_spec.py | 3 +- test/test_son.py | 1 - test/test_srv_polling.py | 1 - test/test_typing.py | 20 +++++++-- test/test_unified_format.py | 3 +- test/test_uri_parser.py | 3 +- test/test_uri_spec.py | 4 +- test/unicode/test_utf8.py | 1 - test/unified_format.py | 2 +- test/utils.py | 2 +- tools/clean.py | 2 +- tox.ini | 7 ++-- 53 files changed, 170 insertions(+), 148 deletions(-) delete mode 100644 mypy.ini create mode 100644 
mypy_test.ini delete mode 100644 pytest.ini diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 624cff1bf2..50d2f3ec05 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -3,6 +3,7 @@ name: Python Tests on: push: pull_request: + workflow_dispatch: concurrency: group: tests-${{ github.ref }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 00a03defcd..6a50a72306 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.5.0 hooks: - id: check-added-large-files - id: check-case-conflict @@ -17,8 +17,8 @@ repos: exclude: .patch exclude_types: [json] -- repo: https://github.com/psf/black - rev: 22.3.0 +- repo: https://github.com/psf/black-pre-commit-mirror + rev: 23.10.1 hooks: - id: black files: \.py$ @@ -26,13 +26,13 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.0 + rev: v0.1.3 hooks: - id: ruff args: ["--fix", "--show-fixes"] - repo: https://github.com/adamchainz/blacken-docs - rev: "1.13.0" + rev: "1.16.0" hooks: - id: blacken-docs additional_dependencies: @@ -55,7 +55,7 @@ repos: # We use the Python version instead of the original version which seems to require Docker # https://github.com/koalaman/shellcheck-precommit - repo: https://github.com/shellcheck-py/shellcheck-py - rev: v0.8.0.4 + rev: v0.9.0.6 hooks: - id: shellcheck name: shellcheck @@ -63,14 +63,14 @@ repos: stages: [manual] - repo: https://github.com/PyCQA/doc8 - rev: 0.11.1 + rev: v1.1.1 hooks: - id: doc8 args: ["--ignore=D001"] # ignore line length stages: [manual] - repo: https://github.com/sirosen/check-jsonschema - rev: 0.14.1 + rev: 0.27.0 hooks: - id: check-jsonschema name: "Check GitHub Workflows" @@ -80,15 +80,16 @@ repos: stages: [manual] - repo: https://github.com/ariebovenberg/slotscheck - rev: v0.14.0 + rev: v0.17.0 hooks: - id: slotscheck files: \.py$ exclude: "^(test|tools)/" stages: [manual] + args: ["--no-strict-imports"] - repo: https://github.com/codespell-project/codespell - rev: "v2.2.4" + rev: "v2.2.6" hooks: - id: codespell # Examples of errors or updates to justify the exceptions: diff --git a/MANIFEST.in b/MANIFEST.in index 444da54d57..710eae8985 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,7 +3,6 @@ include LICENSE include THIRD-PARTY-NOTICES include *.ini exclude .coveragerc -exclude .flake8 exclude .git-blame-ignore-revs exclude .pre-commit-config.yaml exclude .readthedocs.yaml diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 5fd52aa7c1..0000000000 --- a/mypy.ini +++ /dev/null @@ -1,40 +0,0 @@ -[mypy] -python_version = 3.7 -check_untyped_defs = true -disallow_subclassing_any = true -disallow_incomplete_defs = true -no_implicit_optional = true -pretty = true -show_error_context = true -show_error_codes = true -strict_equality = true -warn_unused_configs = true -warn_unused_ignores = true -warn_redundant_casts = true - -[mypy-gevent.*] -ignore_missing_imports = True - -[mypy-kerberos.*] -ignore_missing_imports = True - -[mypy-mockupdb] -ignore_missing_imports = True - -[mypy-pymongo_auth_aws.*] -ignore_missing_imports = True - -[mypy-pymongocrypt.*] -ignore_missing_imports = True - -[mypy-service_identity.*] -ignore_missing_imports = True - -[mypy-snappy.*] -ignore_missing_imports = True - -[mypy-test.test_typing] -warn_unused_ignores = True - -[mypy-winkerberos.*] -ignore_missing_imports = True diff --git a/mypy_test.ini 
b/mypy_test.ini new file mode 100644 index 0000000000..bfd05bf815 --- /dev/null +++ b/mypy_test.ini @@ -0,0 +1,9 @@ +[mypy] +python_version = 3.7 +strict = true +show_error_codes = true +disable_error_code = attr-defined, union-attr, var-annotated, assignment, no-redef, type-arg, import, no-untyped-call, no-untyped-def, index, no-any-return, misc +exclude = (?x)( + ^test/mypy_fails/*.*$ + | ^test/conftest.py$ + ) diff --git a/pymongo/aggregation.py b/pymongo/aggregation.py index ef6af1092e..a6f8eeade9 100644 --- a/pymongo/aggregation.py +++ b/pymongo/aggregation.py @@ -122,7 +122,6 @@ def _database(self) -> Database: def get_read_preference( self, session: Optional[ClientSession] ) -> Union[_AggWritePref, _ServerMode]: - if self._write_preference: return self._write_preference pref = self._target._read_preference_for(session) diff --git a/pymongo/auth.py b/pymongo/auth.py index 58fc36d051..1926a3ba92 100644 --- a/pymongo/auth.py +++ b/pymongo/auth.py @@ -49,13 +49,13 @@ HAVE_KERBEROS = True _USE_PRINCIPAL = False try: - import winkerberos as kerberos + import winkerberos as kerberos # type:ignore[import] if tuple(map(int, kerberos.__version__.split(".")[:2])) >= (0, 5): _USE_PRINCIPAL = True except ImportError: try: - import kerberos + import kerberos # type:ignore[import] except ImportError: HAVE_KERBEROS = False diff --git a/pymongo/auth_aws.py b/pymongo/auth_aws.py index 81f30c7ae3..e704dcd9c1 100644 --- a/pymongo/auth_aws.py +++ b/pymongo/auth_aws.py @@ -16,8 +16,12 @@ from __future__ import annotations try: - import pymongo_auth_aws - from pymongo_auth_aws import AwsCredential, AwsSaslContext, PyMongoAuthAwsError + import pymongo_auth_aws # type:ignore[import] + from pymongo_auth_aws import ( + AwsCredential, + AwsSaslContext, + PyMongoAuthAwsError, + ) _HAVE_MONGODB_AWS = True except ImportError: @@ -29,7 +33,10 @@ def __init__(self, credentials: MongoCredential): _HAVE_MONGODB_AWS = False try: - from pymongo_auth_aws.auth import set_cached_credentials, set_use_cached_credentials + from pymongo_auth_aws.auth import ( # type:ignore[import] + set_cached_credentials, + set_use_cached_credentials, + ) # Enable credential caching. 
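The optional-dependency imports touched in this patch (winkerberos, kerberos, pymongo_auth_aws, and later snappy and pymongocrypt) all follow the same guard pattern: attempt the import, record a feature flag, and add "# type:ignore[import]" so the stricter mypy settings tolerate the missing stubs. A self-contained sketch of that pattern, using a placeholder module name rather than any real dependency:

    try:
        import some_optional_dependency  # type: ignore[import]  # placeholder name
        _HAVE_DEP = True
    except ImportError:
        some_optional_dependency = None
        _HAVE_DEP = False


    def require_dep() -> None:
        # Callers check the flag and fail with a clear message when the extra is absent.
        if not _HAVE_DEP:
            raise RuntimeError("install the optional dependency to use this feature")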
set_use_cached_credentials(True) diff --git a/pymongo/client_options.py b/pymongo/client_options.py index d5f9cfcccd..60332605a3 100644 --- a/pymongo/client_options.py +++ b/pymongo/client_options.py @@ -20,7 +20,6 @@ from bson.codec_options import _parse_codec_options from pymongo import common from pymongo.auth import MongoCredential, _build_credentials_tuple -from pymongo.common import validate_boolean from pymongo.compression_support import CompressionSettings from pymongo.errors import ConfigurationError from pymongo.monitoring import _EventListener, _EventListeners @@ -33,11 +32,11 @@ ) from pymongo.server_selectors import any_server_selector from pymongo.ssl_support import get_ssl_context -from pymongo.write_concern import WriteConcern +from pymongo.write_concern import WriteConcern, validate_boolean if TYPE_CHECKING: from bson.codec_options import CodecOptions - from pymongo.encryption import AutoEncryptionOpts + from pymongo.encryption_options import AutoEncryptionOpts from pymongo.pyopenssl_context import SSLContext from pymongo.topology_description import _ServerSelector diff --git a/pymongo/collation.py b/pymongo/collation.py index e940868e59..e025abd80a 100644 --- a/pymongo/collation.py +++ b/pymongo/collation.py @@ -21,6 +21,7 @@ from typing import Any, Mapping, Optional, Union from pymongo import common +from pymongo.write_concern import validate_boolean class CollationStrength: @@ -170,13 +171,13 @@ def __init__( locale = common.validate_string("locale", locale) self.__document: dict[str, Any] = {"locale": locale} if caseLevel is not None: - self.__document["caseLevel"] = common.validate_boolean("caseLevel", caseLevel) + self.__document["caseLevel"] = validate_boolean("caseLevel", caseLevel) if caseFirst is not None: self.__document["caseFirst"] = common.validate_string("caseFirst", caseFirst) if strength is not None: self.__document["strength"] = common.validate_integer("strength", strength) if numericOrdering is not None: - self.__document["numericOrdering"] = common.validate_boolean( + self.__document["numericOrdering"] = validate_boolean( "numericOrdering", numericOrdering ) if alternate is not None: @@ -184,11 +185,9 @@ def __init__( if maxVariable is not None: self.__document["maxVariable"] = common.validate_string("maxVariable", maxVariable) if normalization is not None: - self.__document["normalization"] = common.validate_boolean( - "normalization", normalization - ) + self.__document["normalization"] = validate_boolean("normalization", normalization) if backwards is not None: - self.__document["backwards"] = common.validate_boolean("backwards", backwards) + self.__document["backwards"] = validate_boolean("backwards", backwards) self.__document.update(kwargs) @property diff --git a/pymongo/collection.py b/pymongo/collection.py index 7f4354e7d1..38804afd9b 100644 --- a/pymongo/collection.py +++ b/pymongo/collection.py @@ -80,7 +80,7 @@ UpdateResult, ) from pymongo.typings import _CollationIn, _DocumentType, _DocumentTypeArg, _Pipeline -from pymongo.write_concern import WriteConcern +from pymongo.write_concern import WriteConcern, validate_boolean T = TypeVar("T") @@ -112,7 +112,6 @@ class ReturnDocument: if TYPE_CHECKING: - import bson from pymongo.aggregation import _AggregationCommand from pymongo.client_session import ClientSession @@ -775,7 +774,7 @@ def _update( comment: Optional[Any] = None, ) -> Optional[Mapping[str, Any]]: """Internal update / replace helper.""" - common.validate_boolean("upsert", upsert) + validate_boolean("upsert", upsert) collation = 
validate_collation_or_none(collation) write_concern = write_concern or self.write_concern acknowledged = write_concern.acknowledged @@ -3093,7 +3092,7 @@ def __find_and_modify( if sort is not None: cmd["sort"] = helpers._index_document(sort) if upsert is not None: - common.validate_boolean("upsert", upsert) + validate_boolean("upsert", upsert) cmd["upsert"] = upsert if hint is not None: if not isinstance(hint, str): diff --git a/pymongo/compression_support.py b/pymongo/compression_support.py index ad54d628bf..98671b8237 100644 --- a/pymongo/compression_support.py +++ b/pymongo/compression_support.py @@ -17,7 +17,7 @@ from typing import Any, Iterable, Optional, Union try: - import snappy + import snappy # type:ignore[import] _HAVE_SNAPPY = True except ImportError: diff --git a/pymongo/cursor.py b/pymongo/cursor.py index 6dfb3ba90b..23b7597efc 100644 --- a/pymongo/cursor.py +++ b/pymongo/cursor.py @@ -40,7 +40,6 @@ from pymongo import helpers from pymongo.collation import validate_collation_or_none from pymongo.common import ( - validate_boolean, validate_is_document_type, validate_is_mapping, ) @@ -57,6 +56,7 @@ ) from pymongo.response import PinnedResponse from pymongo.typings import _Address, _CollationIn, _DocumentOut, _DocumentType +from pymongo.write_concern import validate_boolean if TYPE_CHECKING: from _typeshed import SupportsItems diff --git a/pymongo/encryption.py b/pymongo/encryption.py index cdaf2358d2..0d4966a4a8 100644 --- a/pymongo/encryption.py +++ b/pymongo/encryption.py @@ -33,11 +33,11 @@ ) try: - from pymongocrypt.auto_encrypter import AutoEncrypter - from pymongocrypt.errors import MongoCryptError - from pymongocrypt.explicit_encrypter import ExplicitEncrypter - from pymongocrypt.mongocrypt import MongoCryptOptions - from pymongocrypt.state_machine import MongoCryptCallback + from pymongocrypt.auto_encrypter import AutoEncrypter # type:ignore[import] + from pymongocrypt.errors import MongoCryptError # type:ignore[import] + from pymongocrypt.explicit_encrypter import ExplicitEncrypter # type:ignore[import] + from pymongocrypt.mongocrypt import MongoCryptOptions # type:ignore[import] + from pymongocrypt.state_machine import MongoCryptCallback # type:ignore[import] _HAVE_PYMONGOCRYPT = True except ImportError: @@ -382,9 +382,9 @@ def _get_internal_client( opts._mongocryptd_uri, connect=False, serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS ) - io_callbacks = _EncryptionIO( + io_callbacks = _EncryptionIO( # type:ignore[misc] metadata_client, key_vault_coll, mongocryptd_client, opts - ) # type:ignore[misc] + ) self._auto_encrypter = AutoEncrypter( io_callbacks, MongoCryptOptions( diff --git a/pymongo/encryption_options.py b/pymongo/encryption_options.py index 61480467a3..36b366cafe 100644 --- a/pymongo/encryption_options.py +++ b/pymongo/encryption_options.py @@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, Any, Mapping, Optional try: - import pymongocrypt # noqa: F401 + import pymongocrypt # type:ignore[import] # noqa: F401 _HAVE_PYMONGOCRYPT = True except ImportError: diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index 5d3cfcd832..927bf2344b 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -86,7 +86,7 @@ WaitQueueTimeoutError, ) from pymongo.lock import _HAS_REGISTER_AT_FORK, _create_lock, _release_locks -from pymongo.pool import ConnectionClosedReason +from pymongo.monitoring import ConnectionClosedReason from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.server_selectors import writable_server_selector 
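The recurring import swap in this patch, taking validate_boolean from pymongo.write_concern instead of pymongo.common in collation.py, collection.py, and cursor.py, is about avoiding a circular import between pymongo.common and pymongo.write_concern; write_concern.py keeps its own copy of the helper, and its comment is updated to say "Duplicated here" later in this patch. As a rough sketch consistent with the signature shown there (not copied verbatim from the driver), the helper amounts to no more than:

    from typing import Any


    def validate_boolean(option: str, value: Any) -> bool:
        """Validate that 'value' is True or False."""
        if isinstance(value, bool):
            return value
        raise TypeError(f"{option} must be True or False")


    print(validate_boolean("upsert", True))  # True
    # validate_boolean("upsert", "yes") would raise TypeError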
from pymongo.server_type import SERVER_TYPE diff --git a/pymongo/monitor.py b/pymongo/monitor.py index 92b12f7317..4e6f86934c 100644 --- a/pymongo/monitor.py +++ b/pymongo/monitor.py @@ -52,6 +52,7 @@ def __init__(self, topology: Topology, name: str, interval: int, min_interval: f The background thread is signaled to stop when the Topology or this instance is freed. """ + # We strongly reference the executor and it weakly references us via # this closure. When the monitor is freed, stop the executor soon. def target() -> bool: diff --git a/pymongo/operations.py b/pymongo/operations.py index 2c48a2994e..d780120656 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -29,9 +29,10 @@ from bson.raw_bson import RawBSONDocument from pymongo import helpers from pymongo.collation import validate_collation_or_none -from pymongo.common import validate_boolean, validate_is_mapping, validate_list +from pymongo.common import validate_is_mapping, validate_list from pymongo.helpers import _gen_index_name, _index_document, _index_list from pymongo.typings import _CollationIn, _DocumentType, _Pipeline +from pymongo.write_concern import validate_boolean if TYPE_CHECKING: from bson.son import SON @@ -267,7 +268,13 @@ def _add_to_bulk(self, bulkobj: _Bulk) -> None: def __eq__(self, other: Any) -> bool: if type(other) == type(self): - return (other._filter, other._doc, other._upsert, other._collation, other._hint,) == ( + return ( + other._filter, + other._doc, + other._upsert, + other._collation, + other._hint, + ) == ( self._filter, self._doc, self._upsert, diff --git a/pymongo/pool.py b/pymongo/pool.py index cdafb2cc2c..a7b2fcb8cc 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -54,7 +54,7 @@ ORDERED_TYPES, WAIT_QUEUE_TIMEOUT, ) -from pymongo.errors import ( +from pymongo.errors import ( # type:ignore[attr-defined] AutoReconnect, ConfigurationError, ConnectionFailure, diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index 6657937e99..4f3cc27ba3 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -34,7 +34,7 @@ from service_identity.pyopenssl import verify_ip_address as _verify_ip_address from pymongo.errors import ConfigurationError as _ConfigurationError -from pymongo.errors import _CertificateError +from pymongo.errors import _CertificateError # type:ignore[attr-defined] from pymongo.ocsp_cache import _OCSPCache from pymongo.ocsp_support import _load_trusted_ca_certs, _ocsp_callback from pymongo.socket_checker import SocketChecker as _SocketChecker @@ -270,7 +270,7 @@ def __get_options(self) -> None: return self._ctx.set_options(0) def __set_options(self, value: int) -> None: - # Explcitly convert to int, since newer CPython versions + # Explicitly convert to int, since newer CPython versions # use enum.IntFlag for options. The values are the same # regardless of implementation. 
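The corrected comment above ("Explicitly convert to int") refers to newer CPython exposing SSL context options as enum.IntFlag members, while pyOpenSSL's set_options() wants a plain integer, which is why the wrapper calls int(value). A small stdlib-only illustration of that conversion:

    import ssl

    flags = ssl.OP_NO_COMPRESSION | ssl.OP_SINGLE_DH_USE
    print(type(flags))  # an ssl.Options flag member on modern CPython, not a bare int
    print(int(flags))   # the plain bitmask that pyOpenSSL's set_options() expects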
self._ctx.set_options(int(value)) diff --git a/pymongo/ssl_support.py b/pymongo/ssl_support.py index 3c9ee01ef1..849fbf7018 100644 --- a/pymongo/ssl_support.py +++ b/pymongo/ssl_support.py @@ -97,7 +97,7 @@ class SSLError(Exception): # type: ignore HAS_SNI = False IPADDR_SAFE = False - BLOCKING_IO_ERRORS = () # type: ignore + BLOCKING_IO_ERRORS = () # type:ignore[assignment] def get_ssl_context(*dummy): # type: ignore """No ssl module, raise ConfigurationError.""" diff --git a/pymongo/topology_description.py b/pymongo/topology_description.py index 141f74edf3..e51378a022 100644 --- a/pymongo/topology_description.py +++ b/pymongo/topology_description.py @@ -586,7 +586,6 @@ def _update_rs_from_primary( server.server_type is SERVER_TYPE.RSPrimary and server.address != server_description.address ): - # Reset old primary's type to Unknown. sds[server.address] = server.to_unknown() diff --git a/pymongo/write_concern.py b/pymongo/write_concern.py index ab6629fbbc..893d05b688 100644 --- a/pymongo/write_concern.py +++ b/pymongo/write_concern.py @@ -20,7 +20,7 @@ from pymongo.errors import ConfigurationError -# Moved here to avoid a circular import. +# Duplicated here to avoid a circular import. def validate_boolean(option: str, value: Any) -> bool: """Validates that 'value' is True or False.""" if isinstance(value, bool): diff --git a/pyproject.toml b/pyproject.toml index 78925e6024..b435afb721 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,9 +96,46 @@ bson=["py.typed", "*.pyi"] pymongo=["py.typed", "*.pyi"] gridfs=["py.typed", "*.pyi"] +[tool.pytest.ini_options] +minversion = "7" +addopts = ["-ra", "--strict-config", "--strict-markers", "--junitxml=xunit-results/TEST-results.xml"] +testpaths = ["test"] +log_cli_level = "INFO" +norecursedirs = ["test/*"] +faulthandler_timeout = 1500 +xfail_strict = true +filterwarnings = [ + "error", + # Internal warnings raised during tests. + "module:use an explicit session with no_cursor_timeout=True:UserWarning", + "module:serverselectiontimeoutms must be:UserWarning", + "module:Unsupported compressor:UserWarning", + "module:zlibcompressionlevel must be:UserWarning", + "module:Wire protocol compression with:UserWarning", + # TODO: Remove as part of PYTHON-3923. 
+ "module:unclosed Iterable[str]: yield os.path.join(dirpath, filename) -def only_type_check(func): +FuncT = TypeVar("FuncT", bound=Callable[..., None]) + + +def only_type_check(func: FuncT) -> FuncT: def inner(*args, **kwargs): if not TYPE_CHECKING: raise unittest.SkipTest("Used for Type Checking Only") func(*args, **kwargs) - return inner + return cast(FuncT, inner) class TestMypyFails(unittest.TestCase): diff --git a/test/test_unified_format.py b/test/test_unified_format.py index bc6dbcc5c2..1b3a134237 100644 --- a/test/test_unified_format.py +++ b/test/test_unified_format.py @@ -15,6 +15,7 @@ import os import sys +from typing import Any sys.path[0:0] = [""] @@ -58,7 +59,7 @@ def setUp(self): self.match_evaluator = MatchEvaluatorUtil(self) def test_unsetOrMatches(self): - spec = {"$$unsetOrMatches": {"y": {"$$unsetOrMatches": 2}}} + spec: dict[str, Any] = {"$$unsetOrMatches": {"y": {"$$unsetOrMatches": 2}}} for actual in [{}, {"y": 2}, None]: self.match_evaluator.match_result(spec, actual) diff --git a/test/test_uri_parser.py b/test/test_uri_parser.py index e1e59eb651..d5a25f5900 100644 --- a/test/test_uri_parser.py +++ b/test/test_uri_parser.py @@ -18,6 +18,7 @@ import copy import sys import warnings +from typing import Any from urllib.parse import quote_plus sys.path[0:0] = [""] @@ -470,7 +471,7 @@ def test_unquote_after_parsing(self): "&authMechanismProperties=AWS_SESSION_TOKEN:" + quoted_val ) res = parse_uri(uri) - options = { + options: dict[str, Any] = { "authmechanism": "MONGODB-AWS", "authmechanismproperties": {"AWS_SESSION_TOKEN": unquoted_val}, } diff --git a/test/test_uri_spec.py b/test/test_uri_spec.py index ad48fe787c..8c2fd2d048 100644 --- a/test/test_uri_spec.py +++ b/test/test_uri_spec.py @@ -83,7 +83,9 @@ def workdir_context_decorator(func): def modified_test_scenario(*args, **kwargs): original_workdir = os.getcwd() os.chdir(target_workdir) - func(*args, **kwargs) + with warnings.catch_warnings(): + warnings.simplefilter("default") + func(*args, **kwargs) os.chdir(original_workdir) return modified_test_scenario diff --git a/test/unicode/test_utf8.py b/test/unicode/test_utf8.py index fd7fb2154d..578d98bffb 100644 --- a/test/unicode/test_utf8.py +++ b/test/unicode/test_utf8.py @@ -11,7 +11,6 @@ class TestUTF8(unittest.TestCase): - # Verify that python and bson have the same understanding of # legal utf-8 if the first byte is 0xf4 (244) def _assert_same_utf8_validation(self, data): diff --git a/test/unified_format.py b/test/unified_format.py index 68ce36e6fa..0fcabcc707 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -169,7 +169,7 @@ def with_metaclass(meta, *bases): # the actual metaclass. class metaclass(type): def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): + if sys.version_info[:2] >= (3, 7): # noqa: UP036 # This version introduced PEP 560 that requires a bit # of extra care (we mimic what is done by __build_class__). 
resolved_bases = types.resolve_bases(bases) diff --git a/test/utils.py b/test/utils.py index c8f9197c64..e98016ac72 100644 --- a/test/utils.py +++ b/test/utils.py @@ -938,7 +938,7 @@ def gevent_monkey_patched(): try: import socket - import gevent.socket + import gevent.socket # type:ignore[import] return socket.socket is gevent.socket.socket except ImportError: diff --git a/tools/clean.py b/tools/clean.py index 15db9a411b..b6e1867a0a 100644 --- a/tools/clean.py +++ b/tools/clean.py @@ -41,7 +41,7 @@ pass try: - from bson import _cbson # noqa: F401 + from bson import _cbson # type: ignore[attr-defined] # noqa: F401 sys.exit("could still import _cbson") except ImportError: diff --git a/tox.ini b/tox.ini index 76c8700fef..7e889dff2e 100644 --- a/tox.ini +++ b/tox.ini @@ -92,9 +92,8 @@ deps = certifi; platform_system == "win32" or platform_system == "Darwin" typing_extensions commands = - mypy --install-types --non-interactive --strict bson - mypy --install-types --non-interactive --disallow-untyped-defs gridfs tools pymongo - mypy --install-types --non-interactive --disable-error-code var-annotated --disable-error-code attr-defined --disable-error-code union-attr --disable-error-code assignment --disable-error-code no-redef --disable-error-code index --allow-redefinition --allow-untyped-globals --exclude "test/mypy_fails/*.*" --exclude "test/conftest.py" test + mypy --install-types --non-interactive bson gridfs tools pymongo + mypy --install-types --non-interactive --config-file mypy_test.ini test mypy --install-types --non-interactive test/test_typing.py test/test_typing_strict.py [testenv:typecheck-pyright] @@ -157,6 +156,8 @@ deps = https://github.com/ajdavis/mongo-mockup-db/archive/master.zip extras = {[testenv:test]extras} +allowlist_externals = + .evergreen/check-c-extensions.sh passenv = * allowlist_externals = {[testenv:test]allowlist_externals} From 923c8a5abee4de45c243ca727cf63d862a4161c8 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 2 Nov 2023 12:46:40 -0500 Subject: [PATCH 004/221] PYTHON-4024 Use ruff formatter (#1419) --- .pre-commit-config.yaml | 8 +------- bson/son.py | 4 +++- green_framework_test.py | 3 ++- pymongo/database.py | 5 ++++- pymongo/saslprep.py | 3 ++- pyproject.toml | 1 - test/lambda/mongodb/app.py | 7 ++++++- test/mod_wsgi_test/test_client.py | 3 +-- test/pymongo_mocks.py | 2 +- test/test_bson.py | 8 ++++++-- test/test_comment.py | 7 ++++++- test/test_encryption.py | 9 +++++++-- test/test_monitoring.py | 17 +++++++++++++---- test/test_sdam_monitoring_spec.py | 15 +++++++++------ test/test_ssl.py | 3 ++- test/test_topology.py | 8 ++++++-- test/unified_format.py | 5 +++-- tox.ini | 2 -- 18 files changed, 72 insertions(+), 38 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a50a72306..c605981a1d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,19 +17,13 @@ repos: exclude: .patch exclude_types: [json] -- repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.10.1 - hooks: - - id: black - files: \.py$ - args: [--line-length=100] - - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
rev: v0.1.3 hooks: - id: ruff args: ["--fix", "--show-fixes"] + - id: ruff-format - repo: https://github.com/adamchainz/blacken-docs rev: "1.16.0" diff --git a/bson/son.py b/bson/son.py index c5df4e5972..cf62717238 100644 --- a/bson/son.py +++ b/bson/son.py @@ -159,7 +159,9 @@ def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None: # type if kwargs: self.update(kwargs) - def get(self, key: _Key, default: Optional[Union[_Value, _T]] = None) -> Union[_Value, _T, None]: # type: ignore[override] + def get( # type: ignore[override] + self, key: _Key, default: Optional[Union[_Value, _T]] = None + ) -> Union[_Value, _T, None]: try: return self[key] except KeyError: diff --git a/green_framework_test.py b/green_framework_test.py index 01f72b245a..65025798cf 100644 --- a/green_framework_test.py +++ b/green_framework_test.py @@ -98,7 +98,8 @@ def main(): sys.exit(1) run( - args[0], *args[1:] # Framework name. + args[0], + *args[1:], # Framework name. ) # Command line args to pytest, like what test to run. diff --git a/pymongo/database.py b/pymongo/database.py index 70cdee2dc3..75eabc5d34 100644 --- a/pymongo/database.py +++ b/pymongo/database.py @@ -551,7 +551,10 @@ def aggregate( user_fields={"cursor": {"firstBatch": 1}}, ) return self.client._retryable_read( - cmd.get_cursor, cmd.get_read_preference(s), s, retryable=not cmd._performs_write # type: ignore[arg-type] + cmd.get_cursor, + cmd.get_read_preference(s), # type: ignore[arg-type] + s, + retryable=not cmd._performs_write, ) def watch( diff --git a/pymongo/saslprep.py b/pymongo/saslprep.py index 02c845079a..c78ad0f0a9 100644 --- a/pymongo/saslprep.py +++ b/pymongo/saslprep.py @@ -23,7 +23,8 @@ HAVE_STRINGPREP = False def saslprep( - data: Any, prohibit_unassigned_code_points: Optional[bool] = True # noqa: ARG001 + data: Any, + prohibit_unassigned_code_points: Optional[bool] = True, # noqa: ARG001 ) -> Any: """SASLprep dummy""" if isinstance(data, str): diff --git a/pyproject.toml b/pyproject.toml index b435afb721..8f5fc0741f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,7 +144,6 @@ select = [ "C4", # flake8-comprehensions "EM", # flake8-errmsg "ICN", # flake8-import-conventions - "ISC", # flake8-implicit-str-concat "G", # flake8-logging-format "PGH", # pygrep-hooks "PIE", # flake8-pie diff --git a/test/lambda/mongodb/app.py b/test/lambda/mongodb/app.py index 65e6dc88ff..5840347d9a 100644 --- a/test/lambda/mongodb/app.py +++ b/test/lambda/mongodb/app.py @@ -130,7 +130,12 @@ def create_response(): # Reset the numbers. 
def reset(): - global open_connections, heartbeat_count, total_heartbeat_duration, total_commands, total_command_duration + global \ + open_connections, \ + heartbeat_count, \ + total_heartbeat_duration, \ + total_commands, \ + total_command_duration open_connections = 0 heartbeat_count = 0 total_heartbeat_duration = 0 diff --git a/test/mod_wsgi_test/test_client.py b/test/mod_wsgi_test/test_client.py index 63ae883473..88eeb7a57e 100644 --- a/test/mod_wsgi_test/test_client.py +++ b/test/mod_wsgi_test/test_client.py @@ -131,8 +131,7 @@ def main(options, mode, urls): if options.verbose: print( - "Getting {} {} times total in {} threads, " - "{} times per thread".format( + "Getting {} {} times total in {} threads, " "{} times per thread".format( urls, nrequests_per_thread * options.nthreads, options.nthreads, diff --git a/test/pymongo_mocks.py b/test/pymongo_mocks.py index 2b291c7bd3..c750d0cf71 100644 --- a/test/pymongo_mocks.py +++ b/test/pymongo_mocks.py @@ -47,7 +47,7 @@ def checkout(self, handler=None): assert host_and_port in ( client.mock_standalones + client.mock_members + client.mock_mongoses - ), ("bad host: %s" % host_and_port) + ), "bad host: %s" % host_and_port with Pool.checkout(self, handler) as conn: conn.mock_host = self.mock_host diff --git a/test/test_bson.py b/test/test_bson.py index 749c63bdf3..763885e5f4 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -783,7 +783,9 @@ def test_bson_regex(self): self.assertEqual(0, bson_re1.flags) doc1 = {"r": bson_re1} - doc1_bson = b"\x11\x00\x00\x00\x0br\x00[\\w-\\.]\x00\x00\x00" # document length # r: regex # document terminator + doc1_bson = ( + b"\x11\x00\x00\x00\x0br\x00[\\w-\\.]\x00\x00\x00" + ) # document length # r: regex # document terminator self.assertEqual(doc1_bson, encode(doc1)) self.assertEqual(doc1, decode(doc1_bson)) @@ -794,7 +796,9 @@ def test_bson_regex(self): doc2_with_re = {"r": re2} doc2_with_bson_re = {"r": bson_re2} - doc2_bson = b"\x11\x00\x00\x00\x0br\x00.*\x00imsux\x00\x00" # document length # r: regex # document terminator + doc2_bson = ( + b"\x11\x00\x00\x00\x0br\x00.*\x00imsux\x00\x00" + ) # document length # r: regex # document terminator self.assertEqual(doc2_bson, encode(doc2_with_re)) self.assertEqual(doc2_bson, encode(doc2_with_bson_re)) diff --git a/test/test_comment.py b/test/test_comment.py index baac68be58..047bc3f550 100644 --- a/test/test_comment.py +++ b/test/test_comment.py @@ -42,7 +42,12 @@ def empty(self, *args, **kwargs): class TestComment(IntegrationTest): def _test_ops( - self, helpers, already_supported, listener, db=Empty(), coll=Empty() # noqa: B008 + self, + helpers, + already_supported, + listener, + db=Empty(), # noqa: B008 + coll=Empty(), # noqa: B008 ): for h, args in helpers: c = "testing comment with " + h.__name__ diff --git a/test/test_encryption.py b/test/test_encryption.py index d034529b46..9de0707e7b 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -503,7 +503,10 @@ def test_bson_errors(self): def test_codec_options(self): with self.assertRaisesRegex(TypeError, "codec_options must be"): ClientEncryption( - KMS_PROVIDERS, "keyvault.datakeys", client_context.client, None # type: ignore[arg-type] + KMS_PROVIDERS, + "keyvault.datakeys", + client_context.client, + None, # type: ignore[arg-type] ) opts = CodecOptions(uuid_representation=UuidRepresentation.JAVA_LEGACY) @@ -1072,7 +1075,9 @@ def _test_corpus(self, opts): try: encrypted_val = client_encryption.encrypt( - value["value"], algo, **kwargs # type: ignore[arg-type] + value["value"], + algo, 
+ **kwargs, # type: ignore[arg-type] ) if not value["allowed"]: self.fail(f"encrypt should have failed: {key!r}: {value!r}") diff --git a/test/test_monitoring.py b/test/test_monitoring.py index 6880a30dc7..26a1da3f81 100644 --- a/test/test_monitoring.py +++ b/test/test_monitoring.py @@ -1191,7 +1191,9 @@ def test_server_heartbeat_event_repr(self): ) delta = 0.1 event = monitoring.ServerHeartbeatSucceededEvent( - delta, {"ok": 1}, connection_id # type: ignore[arg-type] + delta, + {"ok": 1}, # type: ignore[arg-type] + connection_id, ) self.assertEqual( repr(event), @@ -1199,7 +1201,9 @@ def test_server_heartbeat_event_repr(self): "duration: 0.1, awaited: False, reply: {'ok': 1}>", ) event = monitoring.ServerHeartbeatFailedEvent( - delta, "ERROR", connection_id # type: ignore[arg-type] + delta, + "ERROR", # type: ignore[arg-type] + connection_id, ) self.assertEqual( repr(event), @@ -1216,7 +1220,10 @@ def test_server_event_repr(self): "", ) event = monitoring.ServerDescriptionChangedEvent( - "PREV", "NEW", server_address, topology_id # type: ignore[arg-type] + "PREV", # type: ignore[arg-type] + "NEW", # type: ignore[arg-type] + server_address, + topology_id, ) self.assertEqual( repr(event), @@ -1233,7 +1240,9 @@ def test_topology_event_repr(self): event = monitoring.TopologyOpenedEvent(topology_id) self.assertEqual(repr(event), "") event = monitoring.TopologyDescriptionChangedEvent( - "PREV", "NEW", topology_id # type: ignore[arg-type] + "PREV", # type: ignore[arg-type] + "NEW", # type: ignore[arg-type] + topology_id, ) self.assertEqual( repr(event), diff --git a/test/test_sdam_monitoring_spec.py b/test/test_sdam_monitoring_spec.py index 6e6a07cd60..105ffaf034 100644 --- a/test/test_sdam_monitoring_spec.py +++ b/test/test_sdam_monitoring_spec.py @@ -83,8 +83,9 @@ def compare_events(expected_dict, actual): if expected["address"] != "{}:{}".format(*actual.server_address): return ( False, - "ServerOpeningEvent published with wrong address (expected" - " {}, got {}".format(expected["address"], actual.server_address), + "ServerOpeningEvent published with wrong address (expected" " {}, got {}".format( + expected["address"], actual.server_address + ), ) elif expected_type == "server_description_changed_event": @@ -93,8 +94,9 @@ def compare_events(expected_dict, actual): if expected["address"] != "{}:{}".format(*actual.server_address): return ( False, - "ServerDescriptionChangedEvent has wrong address" - " (expected {}, got {}".format(expected["address"], actual.server_address), + "ServerDescriptionChangedEvent has wrong address" " (expected {}, got {}".format( + expected["address"], actual.server_address + ), ) if not compare_server_descriptions(expected["newDescription"], actual.new_description): @@ -113,8 +115,9 @@ def compare_events(expected_dict, actual): if expected["address"] != "{}:{}".format(*actual.server_address): return ( False, - "ServerClosedEvent published with wrong address" - " (expected {}, got {}".format(expected["address"], actual.server_address), + "ServerClosedEvent published with wrong address" " (expected {}, got {}".format( + expected["address"], actual.server_address + ), ) elif expected_type == "topology_opening_event": diff --git a/test/test_ssl.py b/test/test_ssl.py index bde385138c..3b307df39e 100644 --- a/test/test_ssl.py +++ b/test/test_ssl.py @@ -447,7 +447,8 @@ def test_validation_with_system_ca_certs(self): # Server cert and hostname are verified. 
connected( MongoClient( - "mongodb://localhost/?ssl=true&serverSelectionTimeoutMS=1000", **self.credentials # type: ignore[arg-type] + "mongodb://localhost/?ssl=true&serverSelectionTimeoutMS=1000", + **self.credentials, # type: ignore[arg-type] ) ) diff --git a/test/test_topology.py b/test/test_topology.py index 88c99d2a28..3ee33afab7 100644 --- a/test/test_topology.py +++ b/test/test_topology.py @@ -452,7 +452,9 @@ def test_discover_set_name_from_primary(self): # Discovering a replica set without the setName supplied by the user # is not yet supported by MongoClient, but Topology can do it. topology_settings = SetNameDiscoverySettings( - seeds=[address], pool_class=MockPool, monitor_class=DummyMonitor # type: ignore[arg-type] + seeds=[address], + pool_class=MockPool, # type: ignore[arg-type] + monitor_class=DummyMonitor, # type: ignore[arg-type] ) t = Topology(topology_settings) @@ -480,7 +482,9 @@ def test_discover_set_name_from_secondary(self): # Discovering a replica set without the setName supplied by the user # is not yet supported by MongoClient, but Topology can do it. topology_settings = SetNameDiscoverySettings( - seeds=[address], pool_class=MockPool, monitor_class=DummyMonitor # type: ignore[arg-type] + seeds=[address], + pool_class=MockPool, # type: ignore[arg-type] + monitor_class=DummyMonitor, # type: ignore[arg-type] ) t = Topology(topology_settings) diff --git a/test/unified_format.py b/test/unified_format.py index 0fcabcc707..99758989c9 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -1404,8 +1404,9 @@ def _testOperation_targetedFailPoint(self, spec): session = self.entity_map[spec["session"]] if not session._pinned_address: self.fail( - "Cannot use targetedFailPoint operation with unpinned " - "session {}".format(spec["session"]) + "Cannot use targetedFailPoint operation with unpinned " "session {}".format( + spec["session"] + ) ) client = single_client("{}:{}".format(*session._pinned_address)) diff --git a/tox.ini b/tox.ini index 7e889dff2e..bbf2307d90 100644 --- a/tox.ini +++ b/tox.ini @@ -156,8 +156,6 @@ deps = https://github.com/ajdavis/mongo-mockup-db/archive/master.zip extras = {[testenv:test]extras} -allowlist_externals = - .evergreen/check-c-extensions.sh passenv = * allowlist_externals = {[testenv:test]allowlist_externals} From 6c88c732194335c8ec8e9c2c6fbd8c81d34151fc Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Thu, 2 Nov 2023 13:13:45 -0700 Subject: [PATCH 005/221] PYTHON-3837 Driver Container and Kubernetes Awareness (#1418) --- pymongo/pool.py | 23 +++++++++++++++++++++++ test/test_client.py | 12 +++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/pymongo/pool.py b/pymongo/pool.py index a7b2fcb8cc..a9271490f0 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -25,6 +25,7 @@ import threading import time import weakref +from pathlib import Path from typing import ( TYPE_CHECKING, Any, @@ -268,6 +269,25 @@ def _set_keepalive_times(sock: socket.socket) -> None: (platform.python_implementation(), ".".join(map(str, sys.version_info))) ) +DOCKER_ENV_PATH = "/.dockerenv" +ENV_VAR_K8S = "KUBERNETES_SERVICE_HOST" + +RUNTIME_NAME_DOCKER = "docker" +ORCHESTRATOR_NAME_K8S = "kubernetes" + + +def get_container_env_info() -> dict[str, str]: + """Returns the runtime and orchestrator of a container. 
+ If neither value is present, the metadata client.env.container field will be omitted.""" + container = {} + + if Path(DOCKER_ENV_PATH).exists(): + container["runtime"] = RUNTIME_NAME_DOCKER + if os.getenv(ENV_VAR_K8S): + container["orchestrator"] = ORCHESTRATOR_NAME_K8S + + return container + def _is_lambda() -> bool: if os.getenv("AWS_LAMBDA_RUNTIME_API"): @@ -307,6 +327,9 @@ def _getenv_int(key: str) -> Optional[int]: def _metadata_env() -> dict[str, Any]: env: dict[str, Any] = {} + container = get_container_env_info() + if container: + env["container"] = container # Skip if multiple (or no) envs are matched. if (_is_lambda(), _is_azure_func(), _is_gcp_func(), _is_vercel()).count(True) != 1: return env diff --git a/test/test_client.py b/test/test_client.py index 8b2716a262..aceb153120 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -29,6 +29,7 @@ import threading import time from typing import Iterable, Type, no_type_check +from unittest import mock from unittest.mock import patch sys.path[0:0] = [""] @@ -97,7 +98,7 @@ ) from pymongo.mongo_client import MongoClient from pymongo.monitoring import ServerHeartbeatListener, ServerHeartbeatStartedEvent -from pymongo.pool import _METADATA, Connection, PoolOptions +from pymongo.pool import _METADATA, DOCKER_ENV_PATH, ENV_VAR_K8S, Connection, PoolOptions from pymongo.read_preferences import ReadPreference from pymongo.server_description import ServerDescription from pymongo.server_selectors import readable_server_selector, writable_server_selector @@ -347,6 +348,15 @@ def test_metadata(self): options = client._MongoClient__options self.assertEqual(options.pool_options.metadata, metadata) + @mock.patch.dict("os.environ", {ENV_VAR_K8S: "1"}) + def test_container_metadata(self): + metadata = copy.deepcopy(_METADATA) + metadata["env"] = {} + metadata["env"]["container"] = {"orchestrator": "kubernetes"} + client = MongoClient("mongodb://foo:27017/?appname=foobar&connect=false") + options = client._MongoClient__options + self.assertEqual(options.pool_options.metadata["env"], metadata["env"]) + def test_kwargs_codec_options(self): class MyFloatType: def __init__(self, x): From afc2c285b2af30694604c46204cc89bdce9a98db Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 6 Nov 2023 12:51:20 -0600 Subject: [PATCH 006/221] DRIVERS-2543 Pull mongohouse image from ADL ECR repo (#1390) --- .evergreen/config.yml | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index f66b055362..882b229992 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -98,7 +98,7 @@ functions: # If this was a patch build, doing a fresh clone would not actually test the patch cp -R ${PROJECT_DIRECTORY}/ $DRIVERS_TOOLS else - git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS + git clone --branch DRIVERS-2543 https://github.com/blink1073/drivers-evergreen-tools.git $DRIVERS_TOOLS fi echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config @@ -313,18 +313,15 @@ functions: params: script: | ${PREPARE_SHELL} - set -o xtrace - # The mongohouse build script needs to be passed the VARIANT variable, see - # https://github.com/10gen/mongohouse/blob/973cc11/evergreen.yaml#L65 - VARIANT=rhel84-small bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/build-mongohouse-local.sh + bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/pull-mongohouse-image.sh - command: shell.exec type: setup params: - 
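get_container_env_info() above keys off two signals, the /.dockerenv marker file and the KUBERNETES_SERVICE_HOST environment variable, and the result is only attached to the handshake metadata's env.container field when it is non-empty, as exercised by test_container_metadata(). The same detection can be tried stand-alone with nothing but the standard library:

    import os
    from pathlib import Path


    def detect_container() -> dict:
        info = {}
        if Path("/.dockerenv").exists():  # marker file created by the Docker runtime
            info["runtime"] = "docker"
        if os.getenv("KUBERNETES_SERVICE_HOST"):  # injected into every Kubernetes pod
            info["orchestrator"] = "kubernetes"
        return info


    print(detect_container() or "no recognized container environment")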
background: true script: | ${PREPARE_SHELL} - set -o xtrace - bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-local.sh + bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh + sleep 1 + docker ps "stop mongo-orchestration": - command: shell.exec @@ -2976,7 +2973,7 @@ buildvariants: - matrix_name: "data-lake-spec-tests" matrix_spec: - platform: rhel8 + platform: ubuntu-22.04 python-version: ["3.7", "3.10"] auth: "auth" c-extensions: "*" From f230a2e486aa5b376cf862b0ad4c9b6671067de9 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 6 Nov 2023 13:21:07 -0600 Subject: [PATCH 007/221] DRIVERS-2543 Fix source branch for drivers-tools (#1420) --- .evergreen/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 882b229992..b3ea323100 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -27,7 +27,7 @@ timeout: functions: "fetch source": - # Executes git clone and applies the submitted patch, if any + # Executes clone and applies the submitted patch, if any - command: git.get_project params: directory: "src" @@ -98,7 +98,7 @@ functions: # If this was a patch build, doing a fresh clone would not actually test the patch cp -R ${PROJECT_DIRECTORY}/ $DRIVERS_TOOLS else - git clone --branch DRIVERS-2543 https://github.com/blink1073/drivers-evergreen-tools.git $DRIVERS_TOOLS + git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS fi echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config From 83d0e7afa45692d80970457aae24519051b44c23 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 7 Nov 2023 13:19:20 -0600 Subject: [PATCH 008/221] PYTHON-4033 Address perf test UserWarning (#1422) --- .evergreen/perf.yml | 3 +-- test/performance/perf_test.py | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.evergreen/perf.yml b/.evergreen/perf.yml index 43b21a65fb..bd5a860477 100644 --- a/.evergreen/perf.yml +++ b/.evergreen/perf.yml @@ -26,7 +26,7 @@ timeout: functions: "fetch source": - # Executes git clone and applies the submitted patch, if any + # Executes clone and applies the submitted patch, if any - command: git.get_project params: directory: "src" @@ -67,7 +67,6 @@ functions: PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" PREPARE_SHELL: | set -o errexit - set -o xtrace export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" diff --git a/test/performance/perf_test.py b/test/performance/perf_test.py index 2ad4edaf8f..109fff3c14 100644 --- a/test/performance/perf_test.py +++ b/test/performance/perf_test.py @@ -122,7 +122,9 @@ def runTest(self): self.max_iterations = NUM_ITERATIONS for i in range(NUM_ITERATIONS): if time.monotonic() - start > MAX_ITERATION_TIME: - warnings.warn("Test timed out, completed %s iterations." % i) + with warnings.catch_warnings(): + warnings.simplefilter("default") + warnings.warn("Test timed out, completed %s iterations." 
% i) break self.before() with Timer() as timer: From 578024e16af92dcdb4f4a871e257d46483aff58b Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Wed, 8 Nov 2023 12:59:13 -0800 Subject: [PATCH 009/221] PYTHON-3170 Run ping command in SRV spec tests (#1424) --- .../load-balanced/loadBalanced-directConnection.json | 3 ++- test/srv_seedlist/load-balanced/loadBalanced-true-txt.json | 3 ++- test/srv_seedlist/load-balanced/srvMaxHosts-zero-txt.json | 3 ++- test/srv_seedlist/load-balanced/srvMaxHosts-zero.json | 3 ++- test/srv_seedlist/replica-set/direct-connection-false.json | 3 ++- test/srv_seedlist/replica-set/encoded-userinfo-and-db.json | 1 + test/srv_seedlist/replica-set/loadBalanced-false-txt.json | 3 ++- test/srv_seedlist/replica-set/longer-parent-in-return.json | 1 + test/srv_seedlist/replica-set/one-result-default-port.json | 3 ++- .../replica-set/one-txt-record-multiple-strings.json | 3 ++- test/srv_seedlist/replica-set/one-txt-record.json | 3 ++- test/srv_seedlist/replica-set/srv-service-name.json | 3 ++- .../replica-set/srvMaxHosts-equal_to_srv_records.json | 3 ++- .../replica-set/srvMaxHosts-greater_than_srv_records.json | 3 ++- .../replica-set/srvMaxHosts-less_than_srv_records.json | 3 ++- test/srv_seedlist/replica-set/srvMaxHosts-zero-txt.json | 3 ++- test/srv_seedlist/replica-set/srvMaxHosts-zero.json | 3 ++- test/srv_seedlist/replica-set/two-results-default-port.json | 3 ++- .../replica-set/two-results-nonstandard-port.json | 3 ++- .../replica-set/txt-record-with-overridden-ssl-option.json | 3 ++- .../replica-set/txt-record-with-overridden-uri-option.json | 3 ++- test/srv_seedlist/replica-set/uri-with-admin-database.json | 3 ++- test/srv_seedlist/replica-set/uri-with-auth.json | 5 +++++ .../sharded/srvMaxHosts-equal_to_srv_records.json | 3 ++- .../sharded/srvMaxHosts-greater_than_srv_records.json | 3 ++- .../sharded/srvMaxHosts-less_than_srv_records.json | 3 ++- test/srv_seedlist/sharded/srvMaxHosts-zero.json | 3 ++- test/test_dns.py | 2 ++ 28 files changed, 57 insertions(+), 24 deletions(-) diff --git a/test/srv_seedlist/load-balanced/loadBalanced-directConnection.json b/test/srv_seedlist/load-balanced/loadBalanced-directConnection.json index 3f500acdc6..8e459115c1 100644 --- a/test/srv_seedlist/load-balanced/loadBalanced-directConnection.json +++ b/test/srv_seedlist/load-balanced/loadBalanced-directConnection.json @@ -10,5 +10,6 @@ "loadBalanced": true, "ssl": true, "directConnection": false - } + }, + "ping": true } diff --git a/test/srv_seedlist/load-balanced/loadBalanced-true-txt.json b/test/srv_seedlist/load-balanced/loadBalanced-true-txt.json index f9719e760d..39bff5a23b 100644 --- a/test/srv_seedlist/load-balanced/loadBalanced-true-txt.json +++ b/test/srv_seedlist/load-balanced/loadBalanced-true-txt.json @@ -9,5 +9,6 @@ "options": { "loadBalanced": true, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/load-balanced/srvMaxHosts-zero-txt.json b/test/srv_seedlist/load-balanced/srvMaxHosts-zero-txt.json index a18360ea64..474a314fd7 100644 --- a/test/srv_seedlist/load-balanced/srvMaxHosts-zero-txt.json +++ b/test/srv_seedlist/load-balanced/srvMaxHosts-zero-txt.json @@ -10,5 +10,6 @@ "loadBalanced": true, "srvMaxHosts": 0, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/load-balanced/srvMaxHosts-zero.json b/test/srv_seedlist/load-balanced/srvMaxHosts-zero.json index bd85418117..dfc90dc96d 100644 --- a/test/srv_seedlist/load-balanced/srvMaxHosts-zero.json +++ b/test/srv_seedlist/load-balanced/srvMaxHosts-zero.json @@ -10,5 +10,6 @@ 
"loadBalanced": true, "srvMaxHosts": 0, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/direct-connection-false.json b/test/srv_seedlist/replica-set/direct-connection-false.json index 1d57bdcb3c..3f14ff94e7 100644 --- a/test/srv_seedlist/replica-set/direct-connection-false.json +++ b/test/srv_seedlist/replica-set/direct-connection-false.json @@ -11,5 +11,6 @@ "options": { "ssl": true, "directConnection": false - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/encoded-userinfo-and-db.json b/test/srv_seedlist/replica-set/encoded-userinfo-and-db.json index 70c6c23a39..4493628be9 100644 --- a/test/srv_seedlist/replica-set/encoded-userinfo-and-db.json +++ b/test/srv_seedlist/replica-set/encoded-userinfo-and-db.json @@ -17,5 +17,6 @@ "password": "$4to@L8=MC", "db": "mydb?" }, + "ping": false, "comment": "Encoded user, pass, and DB parse correctly" } diff --git a/test/srv_seedlist/replica-set/loadBalanced-false-txt.json b/test/srv_seedlist/replica-set/loadBalanced-false-txt.json index fd2e565c7b..682d32a742 100644 --- a/test/srv_seedlist/replica-set/loadBalanced-false-txt.json +++ b/test/srv_seedlist/replica-set/loadBalanced-false-txt.json @@ -11,5 +11,6 @@ "options": { "loadBalanced": false, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/longer-parent-in-return.json b/test/srv_seedlist/replica-set/longer-parent-in-return.json index 9a8267eaeb..ebe3fe1e77 100644 --- a/test/srv_seedlist/replica-set/longer-parent-in-return.json +++ b/test/srv_seedlist/replica-set/longer-parent-in-return.json @@ -12,5 +12,6 @@ "replicaSet": "repl0", "ssl": true }, + "ping": true, "comment": "Is correct, as returned host name shared the URI root \"test.build.10gen.cc\"." } diff --git a/test/srv_seedlist/replica-set/one-result-default-port.json b/test/srv_seedlist/replica-set/one-result-default-port.json index cebb3b1ec3..9f7733de80 100644 --- a/test/srv_seedlist/replica-set/one-result-default-port.json +++ b/test/srv_seedlist/replica-set/one-result-default-port.json @@ -11,5 +11,6 @@ "options": { "replicaSet": "repl0", "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/one-txt-record-multiple-strings.json b/test/srv_seedlist/replica-set/one-txt-record-multiple-strings.json index 622668c351..1d740b1b59 100644 --- a/test/srv_seedlist/replica-set/one-txt-record-multiple-strings.json +++ b/test/srv_seedlist/replica-set/one-txt-record-multiple-strings.json @@ -11,5 +11,6 @@ "options": { "replicaSet": "repl0", "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/one-txt-record.json b/test/srv_seedlist/replica-set/one-txt-record.json index 2385021ad4..ecdb0a7e2a 100644 --- a/test/srv_seedlist/replica-set/one-txt-record.json +++ b/test/srv_seedlist/replica-set/one-txt-record.json @@ -12,5 +12,6 @@ "replicaSet": "repl0", "authSource": "thisDB", "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srv-service-name.json b/test/srv_seedlist/replica-set/srv-service-name.json index ec36cdbb00..e320c2ca3e 100644 --- a/test/srv_seedlist/replica-set/srv-service-name.json +++ b/test/srv_seedlist/replica-set/srv-service-name.json @@ -12,5 +12,6 @@ "options": { "ssl": true, "srvServiceName": "customname" - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srvMaxHosts-equal_to_srv_records.json b/test/srv_seedlist/replica-set/srvMaxHosts-equal_to_srv_records.json index d9765ac663..70edacfd06 100644 --- 
a/test/srv_seedlist/replica-set/srvMaxHosts-equal_to_srv_records.json +++ b/test/srv_seedlist/replica-set/srvMaxHosts-equal_to_srv_records.json @@ -13,5 +13,6 @@ "options": { "srvMaxHosts": 2, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srvMaxHosts-greater_than_srv_records.json b/test/srv_seedlist/replica-set/srvMaxHosts-greater_than_srv_records.json index 494bb87687..72540ed408 100644 --- a/test/srv_seedlist/replica-set/srvMaxHosts-greater_than_srv_records.json +++ b/test/srv_seedlist/replica-set/srvMaxHosts-greater_than_srv_records.json @@ -12,5 +12,6 @@ "options": { "srvMaxHosts": 3, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srvMaxHosts-less_than_srv_records.json b/test/srv_seedlist/replica-set/srvMaxHosts-less_than_srv_records.json index 66a5e90dad..a9d6dd6fd9 100644 --- a/test/srv_seedlist/replica-set/srvMaxHosts-less_than_srv_records.json +++ b/test/srv_seedlist/replica-set/srvMaxHosts-less_than_srv_records.json @@ -9,5 +9,6 @@ "options": { "srvMaxHosts": 1, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srvMaxHosts-zero-txt.json b/test/srv_seedlist/replica-set/srvMaxHosts-zero-txt.json index 241a901c64..e232edb9eb 100644 --- a/test/srv_seedlist/replica-set/srvMaxHosts-zero-txt.json +++ b/test/srv_seedlist/replica-set/srvMaxHosts-zero-txt.json @@ -13,5 +13,6 @@ "replicaSet": "repl0", "srvMaxHosts": 0, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/srvMaxHosts-zero.json b/test/srv_seedlist/replica-set/srvMaxHosts-zero.json index c68610a201..3421a35a3d 100644 --- a/test/srv_seedlist/replica-set/srvMaxHosts-zero.json +++ b/test/srv_seedlist/replica-set/srvMaxHosts-zero.json @@ -13,5 +13,6 @@ "replicaSet": "repl0", "srvMaxHosts": 0, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/two-results-default-port.json b/test/srv_seedlist/replica-set/two-results-default-port.json index 66028310a6..43efcc6310 100644 --- a/test/srv_seedlist/replica-set/two-results-default-port.json +++ b/test/srv_seedlist/replica-set/two-results-default-port.json @@ -12,5 +12,6 @@ "options": { "replicaSet": "repl0", "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/two-results-nonstandard-port.json b/test/srv_seedlist/replica-set/two-results-nonstandard-port.json index 4900f7cff1..f6e8e415a7 100644 --- a/test/srv_seedlist/replica-set/two-results-nonstandard-port.json +++ b/test/srv_seedlist/replica-set/two-results-nonstandard-port.json @@ -12,5 +12,6 @@ "options": { "replicaSet": "repl0", "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/txt-record-with-overridden-ssl-option.json b/test/srv_seedlist/replica-set/txt-record-with-overridden-ssl-option.json index 0ebc737bd5..3d84cfe446 100644 --- a/test/srv_seedlist/replica-set/txt-record-with-overridden-ssl-option.json +++ b/test/srv_seedlist/replica-set/txt-record-with-overridden-ssl-option.json @@ -12,5 +12,6 @@ "replicaSet": "repl0", "authSource": "thisDB", "ssl": false - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/txt-record-with-overridden-uri-option.json b/test/srv_seedlist/replica-set/txt-record-with-overridden-uri-option.json index 2626ba6083..1a5a240680 100644 --- a/test/srv_seedlist/replica-set/txt-record-with-overridden-uri-option.json +++ b/test/srv_seedlist/replica-set/txt-record-with-overridden-uri-option.json @@ -12,5 +12,6 @@ "replicaSet": "repl0", "authSource": "otherDB", "ssl": true - } + }, + 
"ping": true } diff --git a/test/srv_seedlist/replica-set/uri-with-admin-database.json b/test/srv_seedlist/replica-set/uri-with-admin-database.json index 32710d75f7..c5513a0dad 100644 --- a/test/srv_seedlist/replica-set/uri-with-admin-database.json +++ b/test/srv_seedlist/replica-set/uri-with-admin-database.json @@ -15,5 +15,6 @@ }, "parsed_options": { "auth_database": "adminDB" - } + }, + "ping": true } diff --git a/test/srv_seedlist/replica-set/uri-with-auth.json b/test/srv_seedlist/replica-set/uri-with-auth.json index cc7257d85b..872f997cc7 100644 --- a/test/srv_seedlist/replica-set/uri-with-auth.json +++ b/test/srv_seedlist/replica-set/uri-with-auth.json @@ -9,9 +9,14 @@ "localhost:27018", "localhost:27019" ], + "options": { + "replicaSet": "repl0", + "ssl": true + }, "parsed_options": { "user": "auser", "password": "apass" }, + "ping": false, "comment": "Should preserve auth credentials" } diff --git a/test/srv_seedlist/sharded/srvMaxHosts-equal_to_srv_records.json b/test/srv_seedlist/sharded/srvMaxHosts-equal_to_srv_records.json index 46390726f0..7d2f9a6bf8 100644 --- a/test/srv_seedlist/sharded/srvMaxHosts-equal_to_srv_records.json +++ b/test/srv_seedlist/sharded/srvMaxHosts-equal_to_srv_records.json @@ -12,5 +12,6 @@ "options": { "srvMaxHosts": 2, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/sharded/srvMaxHosts-greater_than_srv_records.json b/test/srv_seedlist/sharded/srvMaxHosts-greater_than_srv_records.json index e02d72bf28..452c7b54db 100644 --- a/test/srv_seedlist/sharded/srvMaxHosts-greater_than_srv_records.json +++ b/test/srv_seedlist/sharded/srvMaxHosts-greater_than_srv_records.json @@ -11,5 +11,6 @@ "options": { "srvMaxHosts": 3, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/sharded/srvMaxHosts-less_than_srv_records.json b/test/srv_seedlist/sharded/srvMaxHosts-less_than_srv_records.json index fdcc1692c0..cd3bf65117 100644 --- a/test/srv_seedlist/sharded/srvMaxHosts-less_than_srv_records.json +++ b/test/srv_seedlist/sharded/srvMaxHosts-less_than_srv_records.json @@ -5,5 +5,6 @@ "options": { "srvMaxHosts": 1, "ssl": true - } + }, + "ping": true } diff --git a/test/srv_seedlist/sharded/srvMaxHosts-zero.json b/test/srv_seedlist/sharded/srvMaxHosts-zero.json index 10ab9e656d..f289628c9c 100644 --- a/test/srv_seedlist/sharded/srvMaxHosts-zero.json +++ b/test/srv_seedlist/sharded/srvMaxHosts-zero.json @@ -11,5 +11,6 @@ "options": { "srvMaxHosts": 0, "ssl": true - } + }, + "ping": true } diff --git a/test/test_dns.py b/test/test_dns.py index 0fe57a4fe7..7b74eb3f7c 100644 --- a/test/test_dns.py +++ b/test/test_dns.py @@ -132,6 +132,8 @@ def run_test(self): wait_until( lambda: num_hosts == len(client.nodes), "wait to connect to num_hosts" ) + if test_case.get("ping", True): + client.admin.command("ping") # XXX: we should block until SRV poller runs at least once # and re-run these assertions. 
else: From b0cd7d236101d7eadf99d5215c88b08bc380951a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 13 Nov 2023 13:59:23 -0600 Subject: [PATCH 010/221] PYTHON-4039 Handle more warnings in tests (#1427) --- pyproject.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8f5fc0741f..f70684ede7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -116,8 +116,11 @@ filterwarnings = [ "module:unclosed Date: Tue, 14 Nov 2023 12:49:42 -0800 Subject: [PATCH 011/221] PYTHON-2834 Direct read/write retries to another mongos if possible (#1421) --- pymongo/mongo_client.py | 14 ++++++++-- pymongo/topology.py | 20 +++++++++++++- test/test_retryable_reads.py | 49 +++++++++++++++++++++++++++++++++++ test/test_retryable_writes.py | 42 ++++++++++++++++++++++++++++++ test/test_topology.py | 20 +++++++++++++- test/utils.py | 6 +++++ 6 files changed, 147 insertions(+), 4 deletions(-) diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index 927bf2344b..180a60bf5c 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -1277,6 +1277,7 @@ def _select_server( server_selector: Callable[[Selection], Selection], session: Optional[ClientSession], address: Optional[_Address] = None, + deprioritized_servers: Optional[list[Server]] = None, ) -> Server: """Select a server to run an operation on this client. @@ -1300,7 +1301,9 @@ def _select_server( if not server: raise AutoReconnect("server %s:%s no longer available" % address) # noqa: UP031 else: - server = topology.select_server(server_selector) + server = topology.select_server( + server_selector, deprioritized_servers=deprioritized_servers + ) return server except PyMongoError as exc: # Server selection errors in a transaction are transient. 
@@ -2291,6 +2294,7 @@ def __init__( ) self._address = address self._server: Server = None # type: ignore + self._deprioritized_servers: list[Server] = [] def run(self) -> T: """Runs the supplied func() and attempts a retry @@ -2359,6 +2363,9 @@ def run(self) -> T: if self._last_error is None: self._last_error = exc + if self._client.topology_description.topology_type == TOPOLOGY_TYPE.Sharded: + self._deprioritized_servers.append(self._server) + def _is_not_eligible_for_retry(self) -> bool: """Checks if the exchange is not eligible for retry""" return not self._retryable or (self._is_retrying() and not self._multiple_retries) @@ -2397,7 +2404,10 @@ def _get_server(self) -> Server: Abstraction to connect to server """ return self._client._select_server( - self._server_selector, self._session, address=self._address + self._server_selector, + self._session, + address=self._address, + deprioritized_servers=self._deprioritized_servers, ) def _write(self) -> T: diff --git a/pymongo/topology.py b/pymongo/topology.py index 786be3ec93..092c7d92af 100644 --- a/pymongo/topology.py +++ b/pymongo/topology.py @@ -282,8 +282,10 @@ def _select_server( selector: Callable[[Selection], Selection], server_selection_timeout: Optional[float] = None, address: Optional[_Address] = None, + deprioritized_servers: Optional[list[Server]] = None, ) -> Server: servers = self.select_servers(selector, server_selection_timeout, address) + servers = _filter_servers(servers, deprioritized_servers) if len(servers) == 1: return servers[0] server1, server2 = random.sample(servers, 2) @@ -297,9 +299,12 @@ def select_server( selector: Callable[[Selection], Selection], server_selection_timeout: Optional[float] = None, address: Optional[_Address] = None, + deprioritized_servers: Optional[list[Server]] = None, ) -> Server: """Like select_servers, but choose a random server if several match.""" - server = self._select_server(selector, server_selection_timeout, address) + server = self._select_server( + selector, server_selection_timeout, address, deprioritized_servers + ) if _csot.get_timeout(): _csot.set_rtt(server.description.min_round_trip_time) return server @@ -931,3 +936,16 @@ def _is_stale_server_description(current_sd: ServerDescription, new_sd: ServerDe if current_tv["processId"] != new_tv["processId"]: return False return current_tv["counter"] > new_tv["counter"] + + +def _filter_servers( + candidates: list[Server], deprioritized_servers: Optional[list[Server]] = None +) -> list[Server]: + """Filter out deprioritized servers from a list of server candidates.""" + if not deprioritized_servers: + return candidates + + filtered = [server for server in candidates if server not in deprioritized_servers] + + # If not possible to pick a prioritized server, return the original list + return filtered or candidates diff --git a/test/test_retryable_reads.py b/test/test_retryable_reads.py index 8779ea1ed8..e3028688d7 100644 --- a/test/test_retryable_reads.py +++ b/test/test_retryable_reads.py @@ -20,6 +20,9 @@ import sys import threading +from bson import SON +from pymongo.errors import AutoReconnect + sys.path[0:0] = [""] from test import ( @@ -31,9 +34,12 @@ ) from test.utils import ( CMAPListener, + EventListener, OvertCommandListener, SpecTestCreator, + rs_client, rs_or_single_client, + set_fail_point, ) from test.utils_spec_runner import SpecRunner @@ -221,5 +227,48 @@ def test_pool_paused_error_is_retryable(self): self.assertEqual(1, len(failed), msg) +class TestRetryableReads(IntegrationTest): + 
@client_context.require_multiple_mongoses + @client_context.require_failCommand_fail_point + def test_retryable_reads_in_sharded_cluster_multiple_available(self): + fail_command = { + "configureFailPoint": "failCommand", + "mode": {"times": 1}, + "data": { + "failCommands": ["find"], + "closeConnection": True, + "appName": "retryableReadTest", + }, + } + + mongos_clients = [] + + for mongos in client_context.mongos_seeds().split(","): + client = rs_or_single_client(mongos) + set_fail_point(client, fail_command) + self.addCleanup(client.close) + mongos_clients.append(client) + + listener = OvertCommandListener() + client = rs_or_single_client( + client_context.mongos_seeds(), + appName="retryableReadTest", + event_listeners=[listener], + retryReads=True, + ) + + with self.fail_point(fail_command): + with self.assertRaises(AutoReconnect): + client.t.t.find_one({}) + + # Disable failpoints on each mongos + for client in mongos_clients: + fail_command["mode"] = "off" + set_fail_point(client, fail_command) + + self.assertEqual(len(listener.failed_events), 2) + self.assertEqual(len(listener.succeeded_events), 0) + + if __name__ == "__main__": unittest.main() diff --git a/test/test_retryable_writes.py b/test/test_retryable_writes.py index 2da6f53f4b..98bf0e5c94 100644 --- a/test/test_retryable_writes.py +++ b/test/test_retryable_writes.py @@ -31,6 +31,7 @@ OvertCommandListener, SpecTestCreator, rs_or_single_client, + set_fail_point, ) from test.utils_spec_runner import SpecRunner from test.version import Version @@ -40,6 +41,7 @@ from bson.raw_bson import RawBSONDocument from bson.son import SON from pymongo.errors import ( + AutoReconnect, ConnectionFailure, OperationFailure, ServerSelectionTimeoutError, @@ -469,6 +471,46 @@ def test_batch_splitting_retry_fails(self): self.assertEqual(final_txn, expected_txn) self.assertEqual(coll.find_one(projection={"_id": True}), {"_id": 1}) + @client_context.require_multiple_mongoses + @client_context.require_failCommand_fail_point + def test_retryable_writes_in_sharded_cluster_multiple_available(self): + fail_command = { + "configureFailPoint": "failCommand", + "mode": {"times": 1}, + "data": { + "failCommands": ["insert"], + "closeConnection": True, + "appName": "retryableWriteTest", + }, + } + + mongos_clients = [] + + for mongos in client_context.mongos_seeds().split(","): + client = rs_or_single_client(mongos) + set_fail_point(client, fail_command) + self.addCleanup(client.close) + mongos_clients.append(client) + + listener = OvertCommandListener() + client = rs_or_single_client( + client_context.mongos_seeds(), + appName="retryableWriteTest", + event_listeners=[listener], + retryWrites=True, + ) + + with self.assertRaises(AutoReconnect): + client.t.t.insert_one({"x": 1}) + + # Disable failpoints on each mongos + for client in mongos_clients: + fail_command["mode"] = "off" + set_fail_point(client, fail_command) + + self.assertEqual(len(listener.failed_events), 2) + self.assertEqual(len(listener.succeeded_events), 0) + class TestWriteConcernError(IntegrationTest): RUN_ON_LOAD_BALANCER = True diff --git a/test/test_topology.py b/test/test_topology.py index 3ee33afab7..418d9402da 100644 --- a/test/test_topology.py +++ b/test/test_topology.py @@ -30,11 +30,12 @@ from pymongo.monitor import Monitor from pymongo.pool import PoolOptions from pymongo.read_preferences import ReadPreference, Secondary +from pymongo.server import Server from pymongo.server_description import ServerDescription from pymongo.server_selectors import any_server_selector, 
writable_server_selector from pymongo.server_type import SERVER_TYPE from pymongo.settings import TopologySettings -from pymongo.topology import Topology, _ErrorContext +from pymongo.topology import Topology, _ErrorContext, _filter_servers from pymongo.topology_description import TOPOLOGY_TYPE @@ -685,6 +686,23 @@ def test_unexpected_load_balancer(self): self.assertNotIn(("a", 27017), t.description.server_descriptions()) self.assertEqual(t.description.topology_type_name, "Unknown") + def test_filtered_server_selection(self): + s1 = Server(ServerDescription(("localhost", 27017)), pool=object(), monitor=object()) # type: ignore[arg-type] + s2 = Server(ServerDescription(("localhost2", 27017)), pool=object(), monitor=object()) # type: ignore[arg-type] + servers = [s1, s2] + + result = _filter_servers(servers, deprioritized_servers=[s2]) + self.assertEqual(result, [s1]) + + result = _filter_servers(servers, deprioritized_servers=[s1, s2]) + self.assertEqual(result, servers) + + result = _filter_servers(servers, deprioritized_servers=[]) + self.assertEqual(result, servers) + + result = _filter_servers(servers) + self.assertEqual(result, servers) + def wait_for_primary(topology): """Wait for a Topology to discover a writable server. diff --git a/test/utils.py b/test/utils.py index e98016ac72..08d2f1128b 100644 --- a/test/utils.py +++ b/test/utils.py @@ -1153,3 +1153,9 @@ def prepare_spec_arguments(spec, arguments, opname, entity_map, with_txn_callbac raise AssertionError(f"Unsupported cursorType: {cursor_type}") else: arguments[c2s] = arguments.pop(arg_name) + + +def set_fail_point(client, command_args): + cmd = SON([("configureFailPoint", "failCommand")]) + cmd.update(command_args) + client.admin.command(cmd) From 0ff6a87438fd9a324a8bfee198f9906183cc51d5 Mon Sep 17 00:00:00 2001 From: Jib Date: Wed, 15 Nov 2023 15:03:36 -0500 Subject: [PATCH 012/221] PYTHON-4038: Ensure retryable read `OperationFailure`s re-raise exception when 0 or NoneType error code is provided. 
(#1425) --- pymongo/mongo_client.py | 3 ++- test/mockupdb/test_cursor.py | 28 ++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index 180a60bf5c..e8af251597 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -2333,7 +2333,8 @@ def run(self) -> T: # ConnectionFailures do not supply a code property exc_code = getattr(exc, "code", None) if self._is_not_eligible_for_retry() or ( - exc_code and exc_code not in helpers._RETRYABLE_ERROR_CODES + isinstance(exc, OperationFailure) + and exc_code not in helpers._RETRYABLE_ERROR_CODES ): raise self._retrying = True diff --git a/test/mockupdb/test_cursor.py b/test/mockupdb/test_cursor.py index 1cf3a05ed5..96a7e17053 100644 --- a/test/mockupdb/test_cursor.py +++ b/test/mockupdb/test_cursor.py @@ -16,11 +16,13 @@ from __future__ import annotations import unittest +from test import PyMongoTestCase from mockupdb import MockupDB, OpMsg, going from bson.objectid import ObjectId from pymongo import MongoClient +from pymongo.errors import OperationFailure class TestCursor(unittest.TestCase): @@ -57,5 +59,31 @@ def test_getmore_load_balanced(self): request.replies({"cursor": {"id": cursor_id, "nextBatch": [{}]}}) +class TestRetryableErrorCodeCatch(PyMongoTestCase): + def _test_fail_on_operation_failure_with_code(self, code): + """Test reads on error codes that should not be retried""" + server = MockupDB() + server.run() + self.addCleanup(server.stop) + server.autoresponds("ismaster", maxWireVersion=6) + + client = MongoClient(server.uri) + + with going(lambda: server.receives(OpMsg({"find": "collection"})).command_err(code=code)): + cursor = client.db.collection.find() + with self.assertRaises(OperationFailure) as ctx: + cursor.next() + self.assertEqual(ctx.exception.code, code) + + def test_fail_on_operation_failure_none(self): + self._test_fail_on_operation_failure_with_code(None) + + def test_fail_on_operation_failure_zero(self): + self._test_fail_on_operation_failure_with_code(0) + + def test_fail_on_operation_failure_one(self): + self._test_fail_on_operation_failure_with_code(1) + + if __name__ == "__main__": unittest.main() From 9c45ef808a7e3e69479fb2edf664e1d4adc7b02f Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 16 Nov 2023 09:50:58 -0600 Subject: [PATCH 013/221] PYTHON-4043 Add changelog entry for 4.6.1 (#1433) --- doc/changelog.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/changelog.rst b/doc/changelog.rst index e04452e483..4a2a634f41 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,6 +1,13 @@ Changelog ========= +Changes in Version 4.6.1 +------------------------ + +PyMongo 4.6.1 fixes the following bug: + +- Ensure retryable read ``OperationFailure`` errors re-raise exception when 0 or NoneType error code is provided. 
+ Changes in Version 4.6 ---------------------- From 134b7efaec4cf2b613d3cec44d541d88635a33fa Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 16 Nov 2023 10:56:35 -0600 Subject: [PATCH 014/221] PYTHON-4043 [v4.6] Add changelog entry for 4.6.1 (#1435) From 2ac7f0d88d6856f6fc2ba88567627851e528956d Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 16 Nov 2023 12:50:13 -0800 Subject: [PATCH 015/221] PYTHON-3823 Migrate perf testing to rhel90-dbx-perf-large, Python 3.10.4, MongoDB 6.0.6 (#1431) --- .evergreen/perf.yml | 29 +++-------------------------- .evergreen/run-perf-tests.sh | 2 +- 2 files changed, 4 insertions(+), 27 deletions(-) diff --git a/.evergreen/perf.yml b/.evergreen/perf.yml index bd5a860477..e9a193cd16 100644 --- a/.evergreen/perf.yml +++ b/.evergreen/perf.yml @@ -67,6 +67,7 @@ functions: PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" PREPARE_SHELL: | set -o errexit + export SKIP_LEGACY_SHELL=1 export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" @@ -198,34 +199,12 @@ post: - func: "cleanup" tasks: - - name: "perf-4.0-standalone" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.0" - TOPOLOGY: "server" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - - - name: "perf-4.4-standalone" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.4" - TOPOLOGY: "server" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - - name: "perf-6.0-standalone" tags: ["perf"] commands: - func: "bootstrap mongo-orchestration" vars: - VERSION: "6.0" + VERSION: "v6.0-perf" TOPOLOGY: "server" - func: "run perf tests" - func: "attach benchmark test results" @@ -236,8 +215,6 @@ buildvariants: - name: "perf-tests" display_name: "Performance Benchmark Tests" batchtime: 10080 # 7 days - run_on: ubuntu2004-large + run_on: rhel90-dbx-perf-large tasks: - - name: "perf-4.0-standalone" - - name: "perf-4.4-standalone" - name: "perf-6.0-standalone" diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh index 72be38e03d..6363f4e430 100644 --- a/.evergreen/run-perf-tests.sh +++ b/.evergreen/run-perf-tests.sh @@ -13,7 +13,7 @@ cd .. 
export TEST_PATH="${PROJECT_DIRECTORY}/driver-performance-test-data" export OUTPUT_FILE="${PROJECT_DIRECTORY}/results.json" -export PYTHON_BINARY=/opt/mongodbtoolchain/v3/bin/python3 +export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 export PERF_TEST=1 bash ./.evergreen/tox.sh -m test-eg From fc220532df094efdd24bd4024e83d45caf33df50 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Fri, 17 Nov 2023 10:35:46 -0800 Subject: [PATCH 016/221] PYTHON-3823 Merge perf task into main project (#1436) --- .evergreen/config.yml | 37 ++++++ .evergreen/perf.yml | 220 ---------------------------------- .evergreen/run-perf-tests.sh | 8 +- test/performance/perf_test.py | 7 +- 4 files changed, 45 insertions(+), 227 deletions(-) delete mode 100644 .evergreen/perf.yml diff --git a/.evergreen/config.yml b/.evergreen/config.yml index b3ea323100..46adaf4e74 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -905,6 +905,25 @@ functions: content_type: ${content_type|application/gzip} display_name: Release files all + "run perf tests": + - command: shell.exec + type: test + params: + working_dir: "src" + script: | + ${PREPARE_SHELL} + PROJECT_DIRECTORY=${PROJECT_DIRECTORY} bash ${PROJECT_DIRECTORY}/.evergreen/run-perf-tests.sh + + "attach benchmark test results": + - command: attach.results + params: + file_location: src/report.json + + "send dashboard data": + - command: perf.send + params: + file: src/results.json + pre: - func: "fetch source" - func: "prepare resources" @@ -2154,6 +2173,17 @@ tasks: SUCCESS=false TEST_FLE_AZURE_AUTO=1 \ ./.evergreen/tox.sh -m test-eg + - name: "perf-6.0-standalone" + tags: ["perf"] + commands: + - func: "bootstrap mongo-orchestration" + vars: + VERSION: "v6.0-perf" + TOPOLOGY: "server" + - func: "run perf tests" + - func: "attach benchmark test results" + - func: "send dashboard data" + axes: # Choice of distro - id: platform @@ -3117,6 +3147,13 @@ buildvariants: tasks: - ".release_tag" +- name: "perf-tests" + display_name: "Performance Benchmark Tests" + batchtime: 10080 # 7 days + run_on: rhel90-dbx-perf-large + tasks: + - name: "perf-6.0-standalone" + # Platform notes # i386 builds of OpenSSL or Cyrus SASL are not available # Debian 8.1 only supports MongoDB 3.4+ diff --git a/.evergreen/perf.yml b/.evergreen/perf.yml deleted file mode 100644 index e9a193cd16..0000000000 --- a/.evergreen/perf.yml +++ /dev/null @@ -1,220 +0,0 @@ -######################################## -# Evergreen Template for MongoDB Drivers -######################################## - -# When a task that used to pass starts to fail -# Go through all versions that may have been skipped to detect -# when the task started failing -stepback: true - -# Mark a failure as a system/bootstrap failure (purple box) rather then a task -# failure by default. -# Actual testing tasks are marked with `type: test` -command_type: system - -# Protect ourself against rogue test case, or curl gone wild, that runs forever -# Good rule of thumb: the averageish length a task takes, times 5 -# That roughly accounts for variable system performance for various buildvariants -exec_timeout_secs: 3600 # 60 minutes is the longest we'll ever run - -# What to do when evergreen hits the timeout (`post:` tasks are run automatically) -timeout: - - command: shell.exec - params: - script: | - ls -la - -functions: - "fetch source": - # Executes clone and applies the submitted patch, if any - - command: git.get_project - params: - directory: "src" - # Applies the subitted patch, if any - # Deprecated. Should be removed. 
But still needed for certain agents (ZAP) - - command: git.apply_patch - # Make an evergreen exapanstion file with dynamic values - - command: shell.exec - params: - working_dir: "src" - script: | - # Get the current unique version of this checkout - if [ "${is_patch}" = "true" ]; then - CURRENT_VERSION=$(git describe)-patch-${version_id} - else - CURRENT_VERSION=latest - fi - - export DRIVERS_TOOLS="$(pwd)/../drivers-tools" - export PROJECT_DIRECTORY="$(pwd)" - - # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory - if [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin - export DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) - export PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) - fi - - export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" - export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" - export UPLOAD_BUCKET="${project}" - - cat < expansion.yml - CURRENT_VERSION: "$CURRENT_VERSION" - DRIVERS_TOOLS: "$DRIVERS_TOOLS" - MONGO_ORCHESTRATION_HOME: "$MONGO_ORCHESTRATION_HOME" - MONGODB_BINARIES: "$MONGODB_BINARIES" - UPLOAD_BUCKET: "$UPLOAD_BUCKET" - PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" - PREPARE_SHELL: | - set -o errexit - export SKIP_LEGACY_SHELL=1 - export DRIVERS_TOOLS="$DRIVERS_TOOLS" - export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" - export MONGODB_BINARIES="$MONGODB_BINARIES" - export UPLOAD_BUCKET="$UPLOAD_BUCKET" - export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" - - export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" - export PATH="$MONGODB_BINARIES:$PATH" - export PROJECT="${project}" - EOT - # See what we've done - cat expansion.yml - - # Load the expansion file to make an evergreen variable with the current unique version - - command: expansions.update - params: - file: src/expansion.yml - - "prepare resources": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - rm -rf $DRIVERS_TOOLS - if [ "${project}" = "drivers-tools" ]; then - # If this was a patch build, doing a fresh clone would not actually test the patch - cp -R ${PROJECT_DIRECTORY}/ $DRIVERS_TOOLS - else - git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS - fi - echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config - - "bootstrap mongo-orchestration": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - MONGODB_VERSION=${VERSION} TOPOLOGY=${TOPOLOGY} AUTH=${AUTH} SSL=${SSL} STORAGE_ENGINE=${STORAGE_ENGINE} bash ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh - # run-orchestration generates expansion file with the MONGODB_URI for the cluster - - command: expansions.update - params: - file: mo-expansion.yml - - "stop mongo-orchestration": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - bash ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh - - "run perf tests": - - command: shell.exec - type: test - params: - working_dir: "src" - script: | - ${PREPARE_SHELL} - PROJECT_DIRECTORY=${PROJECT_DIRECTORY} bash ${PROJECT_DIRECTORY}/.evergreen/run-perf-tests.sh - - "attach benchmark test results": - - command: attach.results - params: - file_location: src/report.json - - "send dashboard data": - - command: perf.send - params: - file: src/results.json - - "cleanup": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - rm -rf $DRIVERS_TOOLS || true - - "fix absolute paths": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - for filename in $(find ${DRIVERS_TOOLS} -name 
\*.json); do - perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|${DRIVERS_TOOLS}|g" $filename - done - - "windows fix": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - cat $i | tr -d '\r' > $i.new - mv $i.new $i - done - # Copy client certificate because symlinks do not work on Windows. - cp ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem - - "make files executable": - - command: shell.exec - params: - script: | - ${PREPARE_SHELL} - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - chmod +x $i - done - - "install dependencies": - - command: shell.exec - params: - working_dir: "src" - script: | - ${PREPARE_SHELL} - file="${PROJECT_DIRECTORY}/.evergreen/install-dependencies.sh" - # Don't use ${file} syntax here because evergreen treats it as an empty expansion. - [ -f "$file" ] && bash $file || echo "$file not available, skipping" - -pre: - - func: "fetch source" - - func: "prepare resources" - # We don't run perf on Windows (yet) - #- func: "windows fix" - - func: "fix absolute paths" - - func: "make files executable" - # We're not testing with TLS (yet) - #- func: "install dependencies" - -post: - - func: "stop mongo-orchestration" - - func: "cleanup" - -tasks: - - name: "perf-6.0-standalone" - tags: ["perf"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "v6.0-perf" - TOPOLOGY: "server" - - func: "run perf tests" - - func: "attach benchmark test results" - - func: "send dashboard data" - -buildvariants: - -- name: "perf-tests" - display_name: "Performance Benchmark Tests" - batchtime: 10080 # 7 days - run_on: rhel90-dbx-perf-large - tasks: - - name: "perf-6.0-standalone" diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh index 6363f4e430..9d6afacf7b 100644 --- a/.evergreen/run-perf-tests.sh +++ b/.evergreen/run-perf-tests.sh @@ -3,14 +3,14 @@ set -o xtrace set -o errexit -git clone https://github.com/mongodb-labs/driver-performance-test-data.git -cd driver-performance-test-data +git clone --depth 1 https://github.com/mongodb/specifications.git +pushd specifications/source/benchmarking/data tar xf extended_bson.tgz tar xf parallel.tgz tar xf single_and_multi_document.tgz -cd .. +popd -export TEST_PATH="${PROJECT_DIRECTORY}/driver-performance-test-data" +export TEST_PATH="${PROJECT_DIRECTORY}/specifications/source/benchmarking/data" export OUTPUT_FILE="${PROJECT_DIRECTORY}/results.json" export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 diff --git a/test/performance/perf_test.py b/test/performance/perf_test.py index 109fff3c14..6aabb595e1 100644 --- a/test/performance/perf_test.py +++ b/test/performance/perf_test.py @@ -83,9 +83,10 @@ def setUp(self): pass def tearDown(self): - name = self.__class__.__name__ + # Remove "Test" so that TestFlatEncoding is reported as "FlatEncoding". + name = self.__class__.__name__[4:] median = self.percentile(50) - bytes_per_sec = self.data_size / median + megabytes_per_sec = self.data_size / median / 1000000 print(f"Running {self.__class__.__name__}. 
MEDIAN={self.percentile(50)}") result_data.append( { @@ -96,7 +97,7 @@ def tearDown(self): }, }, "metrics": [ - {"name": "bytes_per_sec", "value": bytes_per_sec}, + {"name": "megabytes_per_sec", "type": "MEDIAN", "value": megabytes_per_sec}, ], } ) From ec35f7f76e5b7a7f4039de37e0f51bdbcc8c2fdc Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Fri, 17 Nov 2023 12:07:33 -0800 Subject: [PATCH 017/221] PYTHON-3823 Audit benchmark data_size and calculate dynamically it where possible (#1439) --- test/performance/perf_test.py | 144 ++++++++++++---------------------- 1 file changed, 51 insertions(+), 93 deletions(-) diff --git a/test/performance/perf_test.py b/test/performance/perf_test.py index 6aabb595e1..ec3fb0bd4f 100644 --- a/test/performance/perf_test.py +++ b/test/performance/perf_test.py @@ -21,7 +21,7 @@ import tempfile import time import warnings -from typing import Any, List +from typing import Any, List, Optional try: import simplejson as json @@ -70,9 +70,8 @@ def __exit__(self, *args): class PerformanceTest: - dataset: Any - data_size: Any - do_task: Any + dataset: str + data_size: int fail: Any @classmethod @@ -87,7 +86,9 @@ def tearDown(self): name = self.__class__.__name__[4:] median = self.percentile(50) megabytes_per_sec = self.data_size / median / 1000000 - print(f"Running {self.__class__.__name__}. MEDIAN={self.percentile(50)}") + print( + f"Running {self.__class__.__name__}. MB/s={megabytes_per_sec}, MEDIAN={self.percentile(50)}" + ) result_data.append( { "info": { @@ -105,6 +106,9 @@ def tearDown(self): def before(self): pass + def do_task(self): + raise NotImplementedError + def after(self): pass @@ -120,12 +124,13 @@ def percentile(self, percentile): def runTest(self): results = [] start = time.monotonic() - self.max_iterations = NUM_ITERATIONS for i in range(NUM_ITERATIONS): if time.monotonic() - start > MAX_ITERATION_TIME: with warnings.catch_warnings(): warnings.simplefilter("default") - warnings.warn("Test timed out, completed %s iterations." % i) + warnings.warn( + f"Test timed out after {MAX_ITERATION_TIME}s, completed {i}/{NUM_ITERATIONS} iterations." + ) break self.before() with Timer() as timer: @@ -142,6 +147,7 @@ def setUp(self): # Location of test data. 
with open(os.path.join(TEST_PATH, os.path.join("extended_bson", self.dataset))) as data: self.document = loads(data.read()) + self.data_size = len(encode(self.document)) * NUM_DOCS def do_task(self): for _ in range(NUM_DOCS): @@ -154,6 +160,8 @@ def setUp(self): with open(os.path.join(TEST_PATH, os.path.join("extended_bson", self.dataset))) as data: self.document = encode(json.loads(data.read())) + self.data_size = len(self.document) * NUM_DOCS + def do_task(self): for _ in range(NUM_DOCS): decode(self.document) @@ -161,37 +169,31 @@ def do_task(self): class TestFlatEncoding(BsonEncodingTest, unittest.TestCase): dataset = "flat_bson.json" - data_size = 75310000 class TestFlatDecoding(BsonDecodingTest, unittest.TestCase): dataset = "flat_bson.json" - data_size = 75310000 class TestDeepEncoding(BsonEncodingTest, unittest.TestCase): dataset = "deep_bson.json" - data_size = 19640000 class TestDeepDecoding(BsonDecodingTest, unittest.TestCase): dataset = "deep_bson.json" - data_size = 19640000 class TestFullEncoding(BsonEncodingTest, unittest.TestCase): dataset = "full_bson.json" - data_size = 57340000 class TestFullDecoding(BsonDecodingTest, unittest.TestCase): dataset = "full_bson.json" - data_size = 57340000 # SINGLE-DOC BENCHMARKS class TestRunCommand(PerformanceTest, unittest.TestCase): - data_size = 160000 + data_size = len(encode({"hello": True})) * NUM_DOCS def setUp(self): self.client = client_context.client @@ -200,7 +202,7 @@ def setUp(self): def do_task(self): command = self.client.perftest.command for _ in range(NUM_DOCS): - command("ping") + command("hello", True) class TestDocument(PerformanceTest): @@ -225,23 +227,17 @@ def after(self): self.client.perftest.drop_collection("corpus") -class TestFindOneByID(TestDocument, unittest.TestCase): - data_size = 16220000 +class FindTest(TestDocument): + dataset = "tweet.json" def setUp(self): - self.dataset = "tweet.json" super().setUp() - + self.data_size = len(encode(self.document)) * NUM_DOCS documents = [self.document.copy() for _ in range(NUM_DOCS)] self.corpus = self.client.perftest.corpus result = self.corpus.insert_many(documents) self.inserted_ids = result.inserted_ids - def do_task(self): - find_one = self.corpus.find_one - for _id in self.inserted_ids: - find_one({"_id": _id}) - def before(self): pass @@ -249,30 +245,40 @@ def after(self): pass -class TestSmallDocInsertOne(TestDocument, unittest.TestCase): - data_size = 2750000 +class TestFindOneByID(FindTest, unittest.TestCase): + def do_task(self): + find_one = self.corpus.find_one + for _id in self.inserted_ids: + find_one({"_id": _id}) + + +class SmallDocInsertTest(TestDocument): + dataset = "small_doc.json" def setUp(self): - self.dataset = "small_doc.json" super().setUp() - + self.data_size = len(encode(self.document)) * NUM_DOCS self.documents = [self.document.copy() for _ in range(NUM_DOCS)] + +class TestSmallDocInsertOne(SmallDocInsertTest, unittest.TestCase): def do_task(self): insert_one = self.corpus.insert_one for doc in self.documents: insert_one(doc) -class TestLargeDocInsertOne(TestDocument, unittest.TestCase): - data_size = 27310890 +class LargeDocInsertTest(TestDocument): + dataset = "large_doc.json" def setUp(self): - self.dataset = "large_doc.json" super().setUp() + n_docs = 10 + self.data_size = len(encode(self.document)) * n_docs + self.documents = [self.document.copy() for _ in range(n_docs)] - self.documents = [self.document.copy() for _ in range(10)] +class TestLargeDocInsertOne(LargeDocInsertTest, unittest.TestCase): def do_task(self): insert_one = 
self.corpus.insert_one for doc in self.documents: @@ -280,61 +286,24 @@ def do_task(self): # MULTI-DOC BENCHMARKS -class TestFindManyAndEmptyCursor(TestDocument, unittest.TestCase): - data_size = 16220000 - - def setUp(self): - self.dataset = "tweet.json" - super().setUp() - - for _ in range(10): - self.client.perftest.command("insert", "corpus", documents=[self.document] * 1000) - self.corpus = self.client.perftest.corpus - +class TestFindManyAndEmptyCursor(FindTest, unittest.TestCase): def do_task(self): list(self.corpus.find()) - def before(self): - pass - - def after(self): - pass - - -class TestSmallDocBulkInsert(TestDocument, unittest.TestCase): - data_size = 2750000 - - def setUp(self): - self.dataset = "small_doc.json" - super().setUp() - self.documents = [self.document.copy() for _ in range(NUM_DOCS)] - - def before(self): - self.corpus = self.client.perftest.create_collection("corpus") +class TestSmallDocBulkInsert(SmallDocInsertTest, unittest.TestCase): def do_task(self): self.corpus.insert_many(self.documents, ordered=True) -class TestLargeDocBulkInsert(TestDocument, unittest.TestCase): - data_size = 27310890 - - def setUp(self): - self.dataset = "large_doc.json" - super().setUp() - self.documents = [self.document.copy() for _ in range(10)] - - def before(self): - self.corpus = self.client.perftest.create_collection("corpus") - +class TestLargeDocBulkInsert(LargeDocInsertTest, unittest.TestCase): def do_task(self): self.corpus.insert_many(self.documents, ordered=True) -class TestGridFsUpload(PerformanceTest, unittest.TestCase): - data_size = 52428800 - +class GridFsTest(PerformanceTest): def setUp(self): + super().setUp() self.client = client_context.client self.client.drop_database("perftest") @@ -343,44 +312,33 @@ def setUp(self): ) with open(gridfs_path, "rb") as data: self.document = data.read() - + self.data_size = len(self.document) self.bucket = GridFSBucket(self.client.perftest) def tearDown(self): super().tearDown() self.client.drop_database("perftest") + +class TestGridFsUpload(GridFsTest, unittest.TestCase): def before(self): + # Create the bucket. 
self.bucket.upload_from_stream("init", b"x") def do_task(self): self.bucket.upload_from_stream("gridfstest", self.document) -class TestGridFsDownload(PerformanceTest, unittest.TestCase): - data_size = 52428800 - +class TestGridFsDownload(GridFsTest, unittest.TestCase): def setUp(self): - self.client = client_context.client - self.client.drop_database("perftest") - - gridfs_path = os.path.join( - TEST_PATH, os.path.join("single_and_multi_document", "gridfs_large.bin") - ) - - self.bucket = GridFSBucket(self.client.perftest) - with open(gridfs_path, "rb") as gfile: - self.uploaded_id = self.bucket.upload_from_stream("gridfstest", gfile) - - def tearDown(self): - super().tearDown() - self.client.drop_database("perftest") + super().setUp() + self.uploaded_id = self.bucket.upload_from_stream("gridfstest", self.document) def do_task(self): self.bucket.open_download_stream(self.uploaded_id).read() -proc_client = None +proc_client: Optional[MongoClient] = None def proc_init(*dummy): From 51f7fe29f6c763158229ae3adf8855d5c7350d1f Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 21 Nov 2023 14:24:49 -0600 Subject: [PATCH 018/221] PYTHON-4047 Convert top level docs files to Markdown (#1432) --- CONTRIBUTING.md | 224 +++++++++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.rst | 171 ----------------------------------- MANIFEST.in | 6 +- README.md | 229 +++++++++++++++++++++++++++++++++++++++++++++++ README.rst | 212 ------------------------------------------- RELEASE.md | 89 ++++++++++++++++++ RELEASE.rst | 90 ------------------- pyproject.toml | 2 +- 8 files changed, 546 insertions(+), 477 deletions(-) create mode 100644 CONTRIBUTING.md delete mode 100644 CONTRIBUTING.rst create mode 100644 README.md delete mode 100644 README.rst create mode 100644 RELEASE.md delete mode 100644 RELEASE.rst diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..bf25f0af49 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,224 @@ +# Contributing to PyMongo + +PyMongo has a large +[community](https://pymongo.readthedocs.io/en/stable/contributors.html) +and contributions are always encouraged. Contributions can be as simple +as minor tweaks to the documentation. Please read these guidelines +before sending a pull request. + +## Bugfixes and New Features + +Before starting to write code, look for existing +[tickets](https://jira.mongodb.org/browse/PYTHON) or [create +one](https://jira.mongodb.org/browse/PYTHON) for your specific issue or +feature request. That way you avoid working on something that might not +be of interest or that has already been addressed. + +## Supported Interpreters + +PyMongo supports CPython 3.7+ and PyPy3.8+. Language features not +supported by all interpreters can not be used. + +## Style Guide + +PyMongo follows [PEP8](http://www.python.org/dev/peps/pep-0008/) +including 4 space indents and 79 character line limits. + +## General Guidelines + +- Avoid backward breaking changes if at all possible. +- Write inline documentation for new classes and methods. +- Write tests and make sure they pass (make sure you have a mongod + running on the default port, then execute `tox -e test` from the cmd + line to run the test suite). 
+- Add yourself to doc/contributors.rst `:)` + +## Authoring a Pull Request + +**Our Pull Request Policy is based on this** [Code Review Developer +Guide](https://google.github.io/eng-practices/review) + +The expectation for any code author is to provide all the context needed +in the space of a pull request for any engineer to feel equipped to +review the code. Depending on the type of change, do your best to +highlight important new functions or objects you've introduced in the +code; think complex functions or new abstractions. Whilst it may seem +like more work for you to adjust your pull request, the reality is your +likelihood for getting review sooner shoots up. + +**Self Review Guidelines to follow** + +- If the PR is too large, split it if possible. + + - Use 250 LoC (excluding test data and config changes) as a + rule-of-thumb. + + - Moving and changing code should be in separate PRs or commits. + + - Moving: Taking large code blobs and transplanting + them to another file. There\'s generally no (or very + little) actual code changed other than a cut and + paste. It can even be extended to large deletions. + - Changing: Adding code changes (be that refactors or + functionality additions/subtractions). + - These two, when mixed, can muddy understanding and + sometimes make it harder for reviewers to keep track + of things. + +- Prefer explaining with code comments instead of PR comments. + +**Provide background** + +- The PR description and linked tickets should answer the "what" and + "why" of the change. The code change explains the "how". + +**Follow the Template** + +- Please do not deviate from the template we make; it is there for a + lot of reasons. If it is a one line fix, we still need to have + context on what and why it is needed. + +- If making a versioning change, please let that be known. See examples below: + + - `versionadded:: 3.11` + - `versionchanged:: 3.5` + +**Pull Request Template Breakdown** + +- **Github PR Title** + + - The PR Title format should always be + `[JIRA-ID] : Jira Title or Blurb Summary`. + +- **JIRA LINK** + +- Convenient link to the associated JIRA ticket. + +- **Summary** + + - Small blurb on why this is needed. The JIRA task should have + the more in-depth description, but this should still, at a + high level, give anyone looking an understanding of why the + PR has been checked in. + +- **Changes in this PR** + + - The explicit code changes that this PR is introducing. This + should be more specific than just the task name. (Unless the + task name is very clear). + +- **Test Plan** + + - Everything needs a test description. Describe what you did + to validate your changes actually worked; if you did + nothing, then document you did not test it. Aim to make + these steps reproducible by other engineers, specifically + with your primary reviewer in mind. + +- **Screenshots** + + - Any images that provide more context to the PR. Usually, + these just coincide with the test plan. + +- **Callouts or follow-up items** + + - This is a good place for identifying "to-dos" that you've + placed in the code (Must have an accompanying JIRA Ticket). + - Potential bugs that you are unsure how to test in the code. + - Opinions you want to receive about your code. + +## Running Linters + +PyMongo uses [pre-commit](https://pypi.org/project/pre-commit/) for +managing linting of the codebase. `pre-commit` performs various checks +on all files in PyMongo and uses tools that help follow a consistent +code style within the codebase. 
+ +To set up `pre-commit` locally, run: + +```bash +brew install pre-commit +pre-commit install +``` + +To run `pre-commit` manually, run: + +```bash +pre-commit run --all-files +``` + +To run a manual hook like `mypy` manually, run: + +```bash +pre-commit run --all-files --hook-stage manual mypy +``` + +Typically we use `tox` to run the linters, e.g. + +```bash +tox -e typecheck-mypy +tox -e lint-manual +``` + +## Documentation + +To contribute to the [API +documentation](https://pymongo.readthedocs.io/en/stable/) just make your +changes to the inline documentation of the appropriate [source +code](https://github.com/mongodb/mongo-python-driver) or [rst +file](https://github.com/mongodb/mongo-python-driver/tree/master/doc) in +a branch and submit a [pull +request](https://help.github.com/articles/using-pull-requests). You +might also use the GitHub +[Edit](https://github.com/blog/844-forking-with-the-edit-button) button. + +You can build the documentation locally by running: + +```bash +tox -e doc +``` + +## Running Tests Locally + +- Ensure you have started the appropriate Mongo Server(s). +- Run `pip install tox` to use `tox` for testing or run + `pip install -e ".[test]"` to run `pytest` directly. +- Run `tox -m test` or `pytest` to run all of the tests. +- Append `test/.py::::` to run + specific tests. You can omit the `` to test a full class + and the `` to test a full module. For example: + `tox -m test test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress`. +- Use the `-k` argument to select tests by pattern. + +## Running Load Balancer Tests Locally + +- Install `haproxy` (available as `brew install haproxy` on macOS). +- Clone `drivers-evergreen-tools`: + `git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git`. +- Start the servers using + `LOAD_BALANCER=true TOPOLOGY=sharded_cluster AUTH=noauth SSL=nossl MONGODB_VERSION=6.0 DRIVERS_TOOLS=$PWD/drivers-evergreen-tools MONGO_ORCHESTRATION_HOME=$PWD/drivers-evergreen-tools/.evergreen/orchestration $PWD/drivers-evergreen-tools/.evergreen/run-orchestration.sh`. +- Start the load balancer using: + `MONGODB_URI='mongodb://localhost:27017,localhost:27018/' $PWD/drivers-evergreen-tools/.evergreen/run-load-balancer.sh start`. +- Run the tests from the `pymongo` checkout directory using: + `TEST_LOADBALANCER=1 tox -m test-eg`. + +## Re-sync Spec Tests + +If you would like to re-sync the copy of the specification tests in the +PyMongo repository with that which is inside the [specifications +repo](https://github.com/mongodb/specifications), please use the script +provided in `.evergreen/resync-specs.sh`.: + +```bash +git clone git@github.com:mongodb/specifications.git +export MDB_SPECS=~/specifications +cd ~/mongo-python-driver/.evergreen +./resync-specs.sh -b "" spec1 spec2 ... +./resync-specs.sh -b "connection-string*" crud bson-corpus # Updates crud and bson-corpus specs while ignoring all files with the regex "connection-string*" +cd .. +``` + +The `-b` flag adds as a regex pattern to block files you do not wish to +update in PyMongo. This is primarily helpful if you are implementing a +new feature in PyMongo that has spec tests already implemented, or if +you are attempting to validate new spec tests in PyMongo. 
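Taken together, the "Running Tests Locally" steps above reduce to a handful of commands. The following is a minimal sketch only, assuming a mongod is already listening on the default port; the module path and `-k` pattern are illustrative, not prescribed by the guide:

```bash
# Install PyMongo with its test extra so pytest can be invoked directly
pip install -e ".[test]"

# Run the whole suite, a single module, or tests selected by pattern
pytest
pytest test/test_collection.py
pytest -k "change_stream"
```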
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index 07d6f1d77c..0000000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,171 +0,0 @@ -Contributing to PyMongo -======================= - -PyMongo has a large `community -`_ and -contributions are always encouraged. Contributions can be as simple as -minor tweaks to the documentation. Please read these guidelines before -sending a pull request. - -Bugfixes and New Features -------------------------- - -Before starting to write code, look for existing `tickets -`_ or `create one -`_ for your specific -issue or feature request. That way you avoid working on something -that might not be of interest or that has already been addressed. - -Supported Interpreters ----------------------- - -PyMongo supports CPython 3.7+ and PyPy3.8+. Language -features not supported by all interpreters can not be used. - -Style Guide ------------ - -PyMongo follows `PEP8 `_ -including 4 space indents and 79 character line limits. - -General Guidelines ------------------- - -- Avoid backward breaking changes if at all possible. -- Write inline documentation for new classes and methods. -- Write tests and make sure they pass (make sure you have a mongod - running on the default port, then execute ``tox -m test`` - from the cmd line to run the test suite). -- Add yourself to doc/contributors.rst :) - -Authoring a Pull Request ------------------------- - -**Our Pull Request Policy is based on this** `Code Review Developer Guide `_ - -The expectation for any code author is to provide all the context needed in the space of a -pull request for any engineer to feel equipped to review the code. Depending on the type of -change, do your best to highlight important new functions or objects you’ve introduced in the -code; think complex functions or new abstractions. Whilst it may seem like more work for you to -adjust your pull request, the reality is your likelihood for getting review sooner shoots -up. - -**Self Review Guidelines to follow** - -- If the PR is too large, split it if possible. - - Use 250 LoC (excluding test data and config changes) as a rule-of-thumb. - - Moving and changing code should be in separate PRs or commits. - - Moving: Taking large code blobs and transplanting them to another file. There's generally no (or very little) actual code changed other than a cut and paste. It can even be extended to large deletions. - - Changing: Adding code changes (be that refactors or functionality additions/subtractions). - - These two, when mixed, can muddy understanding and sometimes make it harder for reviewers to keep track of things. -- Prefer explaining with code comments instead of PR comments. - -**Provide background** - -- The PR description and linked tickets should answer the "what" and "why" of the change. The code change explains the "how". - -**Follow the Template** - -- Please do not deviate from the template we make; it is there for a lot of reasons. If it is a one line fix, we still need to have context on what and why it is needed. -- If making a versioning change, please let that be known. See examples below: - - ``versionadded:: 3.11`` - - ``versionchanged:: 3.5`` - - -**Pull Request Template Breakdown** - -- **Github PR Title** - - The PR Title format should always be ``[JIRA-ID] : Jira Title or Blurb Summary``. - -- **JIRA LINK** - - Convenient link to the associated JIRA ticket. - -- **Summary** - - Small blurb on why this is needed. 
The JIRA task should have the more in-depth description, but this should still, at a high level, give anyone looking an understanding of why the PR has been checked in. - -- **Changes in this PR** - - The explicit code changes that this PR is introducing. This should be more specific than just the task name. (Unless the task name is very clear). - -- **Test Plan** - - Everything needs a test description. Describe what you did to validate your changes actually worked; if you did nothing, then document you did not test it. Aim to make these steps reproducible by other engineers, specifically with your primary reviewer in mind. - -- **Screenshots** - - Any images that provide more context to the PR. Usually, these just coincide with the test plan. - -- **Callouts or follow-up items** - - This is a good place for identifying “to-dos” that you’ve placed in the code (Must have an accompanying JIRA Ticket). - - Potential bugs that you are unsure how to test in the code. - - Opinions you want to receive about your code. - - -Running Linters ---------------- - -PyMongo uses `pre-commit `_ -for managing linting of the codebase. -``pre-commit`` performs various checks on all files in PyMongo and uses tools -that help follow a consistent code style within the codebase. - -To set up ``pre-commit`` locally, run:: - - pip install pre-commit - pre-commit install - -To run ``pre-commit`` manually, run:: - - pre-commit run --all-files - -To run a manual hook like ``flake8`` manually, run:: - - pre-commit run --all-files --hook-stage manual flake8 - -Documentation -------------- - -To contribute to the `API documentation `_ -just make your changes to the inline documentation of the appropriate -`source code `_ or `rst file -`_ in a -branch and submit a `pull request `_. -You might also use the GitHub `Edit `_ -button. - -Running Tests Locally ---------------------- -- Ensure you have started the appropriate Mongo Server(s). -- Run ``pip install tox`` to use ``tox`` for testing or run ``pip install -e ".[test]"`` to run ``pytest`` directly. -- Run ``tox -m test`` or ``pytest`` to run all of the tests. -- Append ``test/.py::::`` to - run specific tests. You can omit the ```` to test a full class - and the ```` to test a full module. For example: - ``tox -m test test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress``. -- Use the ``-k`` argument to select tests by pattern. - -Running Load Balancer Tests Locally ------------------------------------ -- Install ``haproxy`` (available as ``brew install haproxy`` on macOS). -- Clone ``drivers-evergreen-tools``: ``git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git``. -- Start the servers using ``LOAD_BALANCER=true TOPOLOGY=sharded_cluster AUTH=noauth SSL=nossl MONGODB_VERSION=6.0 DRIVERS_TOOLS=$PWD/drivers-evergreen-tools MONGO_ORCHESTRATION_HOME=$PWD/drivers-evergreen-tools/.evergreen/orchestration $PWD/drivers-evergreen-tools/.evergreen/run-orchestration.sh``. -- Start the load balancer using: ``MONGODB_URI='mongodb://localhost:27017,localhost:27018/' $PWD/drivers-evergreen-tools/.evergreen/run-load-balancer.sh start``. -- Run the tests from the ``pymongo`` checkout directory using: ``TEST_LOADBALANCER=1 tox -m test-eg``. 
- -Re-sync Spec Tests ------------------- - -If you would like to re-sync the copy of the specification tests in the -PyMongo repository with that which is inside the `specifications repo -`_, please -use the script provided in ``.evergreen/resync-specs.sh``.:: - - git clone git@github.com:mongodb/specifications.git - export MDB_SPECS=~/specifications - cd ~/mongo-python-driver/.evergreen - ./resync-specs.sh -b "" spec1 spec2 ... - ./resync-specs.sh -b "connection-string*" crud bson-corpus # Updates crud and bson-corpus specs while ignoring all files with the regex "connection-string*" - cd .. - -The ``-b`` flag adds as a regex pattern to block files you do not wish to -update in PyMongo. -This is primarily helpful if you are implementing a new feature in PyMongo -that has spec tests already implemented, or if you are attempting to -validate new spec tests in PyMongo. diff --git a/MANIFEST.in b/MANIFEST.in index 710eae8985..b74a716dfb 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include README.rst +include README.md include LICENSE include THIRD-PARTY-NOTICES include *.ini @@ -6,8 +6,8 @@ exclude .coveragerc exclude .git-blame-ignore-revs exclude .pre-commit-config.yaml exclude .readthedocs.yaml -exclude CONTRIBUTING.rst -exclude RELEASE.rst +exclude CONTRIBUTING.md +exclude RELEASE.md recursive-include doc *.rst recursive-include doc *.py recursive-include doc *.conf diff --git a/README.md b/README.md new file mode 100644 index 0000000000..8ae5ebdf6c --- /dev/null +++ b/README.md @@ -0,0 +1,229 @@ +# PyMongo + +See [the mongo site](http://www.mongodb.org) for more information. +See [GitHub](http://github.com/mongodb/mongo-python-driver) for the +latest source. + +Documentation: available at [pymongo.readthedocs.io](https://pymongo.readthedocs.io/en/stable/) +Author: The MongoDB Python Team + +## About + +The PyMongo distribution contains tools for interacting with MongoDB +database from Python. The `bson` package is an implementation of the +[BSON format](http://bsonspec.org) for Python. The `pymongo` package is +a native Python driver for MongoDB. The `gridfs` package is a +[gridfs](https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.rst/) +implementation on top of `pymongo`. + +PyMongo supports MongoDB 3.6, 4.0, 4.2, 4.4, 5.0, 6.0, and 7.0. + +## Support / Feedback + +For issues with, questions about, or feedback for PyMongo, please look +into our [support channels](https://support.mongodb.com/welcome). Please +do not email any of the PyMongo developers directly with issues or +questions - you're more likely to get an answer on +[StackOverflow](https://stackoverflow.com/questions/tagged/mongodb) +(using a "mongodb" tag). + +## Bugs / Feature Requests + +Think you've found a bug? Want to see a new feature in PyMongo? Please +open a case in our issue management tool, JIRA: + +- [Create an account and login](https://jira.mongodb.org). +- Navigate to [the PYTHON + project](https://jira.mongodb.org/browse/PYTHON). +- Click **Create Issue** - Please provide as much information as + possible about the issue type and how to reproduce it. + +Bug reports in JIRA for all driver projects (i.e. PYTHON, CSHARP, JAVA) +and the Core Server (i.e. SERVER) project are **public**. + +### How To Ask For Help + +Please include all of the following information when opening an issue: + +- Detailed steps to reproduce the problem, including full traceback, + if possible. 
+ +- The exact python version used, with patch level: + +```bash +python -c "import sys; print(sys.version)" +``` + +- The exact version of PyMongo used, with patch level: + +```bash +python -c "import pymongo; print(pymongo.version); print(pymongo.has_c())" +``` + +- The operating system and version (e.g. Windows 7, OSX 10.8, ...) + +- Web framework or asynchronous network library used, if any, with + version (e.g. Django 1.7, mod_wsgi 4.3.0, gevent 1.0.1, Tornado + 4.0.2, ...) + +### Security Vulnerabilities + +If you've identified a security vulnerability in a driver or any other +MongoDB project, please report it according to the [instructions +here](https://www.mongodb.com/docs/manual/tutorial/create-a-vulnerability-report/). + +## Installation + +PyMongo can be installed with [pip](http://pypi.python.org/pypi/pip): + +```bash +python -m pip install pymongo +``` + +Or `easy_install` from [setuptools](http://pypi.python.org/pypi/setuptools): + +```bash +python -m easy_install pymongo +``` + +You can also download the project source and do: + +```bash +pip install . +``` + +Do **not** install the "bson" package from pypi. PyMongo comes with +its own bson package; running "pip install bson" installs a third-party +package that is incompatible with PyMongo. + +## Dependencies + +PyMongo supports CPython 3.7+ and PyPy3.7+. + +Required dependencies: + +Support for `mongodb+srv://` URIs requires [dnspython](https://pypi.python.org/pypi/dnspython) + +Optional dependencies: + +GSSAPI authentication requires +[pykerberos](https://pypi.python.org/pypi/pykerberos) on Unix or +[WinKerberos](https://pypi.python.org/pypi/winkerberos) on Windows. The +correct dependency can be installed automatically along with PyMongo: + +```bash +python -m pip install "pymongo[gssapi]" +``` + +MONGODB-AWS authentication requires +[pymongo-auth-aws](https://pypi.org/project/pymongo-auth-aws/): + +```bash +python -m pip install "pymongo[aws]" +``` + +OCSP (Online Certificate Status Protocol) requires +[PyOpenSSL](https://pypi.org/project/pyOpenSSL/), +[requests](https://pypi.org/project/requests/), +[service_identity](https://pypi.org/project/service_identity/) and may +require [certifi](https://pypi.python.org/pypi/certifi): + +```bash +python -m pip install "pymongo[ocsp]" +``` + +Wire protocol compression with snappy requires +[python-snappy](https://pypi.org/project/python-snappy): + +```bash +python -m pip install "pymongo[snappy]" +``` + +Wire protocol compression with zstandard requires +[zstandard](https://pypi.org/project/zstandard): + +```bash +python -m pip install "pymongo[zstd]" +``` + +Client-Side Field Level Encryption requires +[pymongocrypt](https://pypi.org/project/pymongocrypt/) and +[pymongo-auth-aws](https://pypi.org/project/pymongo-auth-aws/): + +```bash +python -m pip install "pymongo[encryption]" +``` +You can install all dependencies automatically with the following +command: + +```bash +python -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]" +``` + +Additional dependencies are: + +- (to generate documentation or run tests) + [tox](https://tox.wiki/en/latest/index.html) + +## Examples + +Here's a basic example (for more see the *examples* section of the +docs): + +```pycon +>>> import pymongo +>>> client = pymongo.MongoClient("localhost", 27017) +>>> db = client.test +>>> db.name +'test' +>>> db.my_collection +Collection(Database(MongoClient('localhost', 27017), 'test'), 'my_collection') +>>> db.my_collection.insert_one({"x": 10}).inserted_id 
+ObjectId('4aba15ebe23f6b53b0000000') +>>> db.my_collection.insert_one({"x": 8}).inserted_id +ObjectId('4aba160ee23f6b543e000000') +>>> db.my_collection.insert_one({"x": 11}).inserted_id +ObjectId('4aba160ee23f6b543e000002') +>>> db.my_collection.find_one() +{'x': 10, '_id': ObjectId('4aba15ebe23f6b53b0000000')} +>>> for item in db.my_collection.find(): +... print(item["x"]) +... +10 +8 +11 +>>> db.my_collection.create_index("x") +'x_1' +>>> for item in db.my_collection.find().sort("x", pymongo.ASCENDING): +... print(item["x"]) +... +8 +10 +11 +>>> [item["x"] for item in db.my_collection.find().limit(2).skip(1)] +[8, 11] +``` + +## Documentation + +Documentation is available at +[pymongo.readthedocs.io](https://pymongo.readthedocs.io/en/stable/). + +Documentation can be generated by running **tox -m doc**. Generated +documentation can be found in the `doc/build/html/` directory. + +## Learning Resources + +- MongoDB Learn - [Python +courses](https://learn.mongodb.com/catalog?labels=%5B%22Language%22%5D&values=%5B%22Python%22%5D). +- [Python Articles on Developer +Center](https://www.mongodb.com/developer/languages/python/). + +## Testing + +The easiest way to run the tests is to run **tox -m test** in the root +of the distribution. For example, + +```bash +tox -e test +``` diff --git a/README.rst b/README.rst deleted file mode 100644 index 3172ecb8aa..0000000000 --- a/README.rst +++ /dev/null @@ -1,212 +0,0 @@ -======= -PyMongo -======= -:Info: See `the mongo site `_ for more information. See `GitHub `_ for the latest source. -:Documentation: Available at `pymongo.readthedocs.io `_ -:Author: The MongoDB Python Team - -About -===== - -The PyMongo distribution contains tools for interacting with MongoDB -database from Python. The ``bson`` package is an implementation of -the `BSON format `_ for Python. The ``pymongo`` -package is a native Python driver for MongoDB. The ``gridfs`` package -is a `gridfs -`_ -implementation on top of ``pymongo``. - -PyMongo supports MongoDB 3.6, 4.0, 4.2, 4.4, 5.0, 6.0, and 7.0. - -Support / Feedback -================== - -For issues with, questions about, or feedback for PyMongo, please look into -our `support channels `_. Please -do not email any of the PyMongo developers directly with issues or -questions - you're more likely to get an answer on `StackOverflow `_ -(using a "mongodb" tag). - -Bugs / Feature Requests -======================= - -Think you’ve found a bug? Want to see a new feature in PyMongo? Please open a -case in our issue management tool, JIRA: - -- `Create an account and login `_. -- Navigate to `the PYTHON project `_. -- Click **Create Issue** - Please provide as much information as possible about the issue type and how to reproduce it. - -Bug reports in JIRA for all driver projects (i.e. PYTHON, CSHARP, JAVA) and the -Core Server (i.e. SERVER) project are **public**. - -How To Ask For Help -------------------- - -Please include all of the following information when opening an issue: - -- Detailed steps to reproduce the problem, including full traceback, if possible. -- The exact python version used, with patch level:: - - $ python -c "import sys; print(sys.version)" - -- The exact version of PyMongo used, with patch level:: - - $ python -c "import pymongo; print(pymongo.version); print(pymongo.has_c())" - -- The operating system and version (e.g. Windows 7, OSX 10.8, ...) -- Web framework or asynchronous network library used, if any, with version (e.g. - Django 1.7, mod_wsgi 4.3.0, gevent 1.0.1, Tornado 4.0.2, ...) 
- -Security Vulnerabilities ------------------------- - -If you’ve identified a security vulnerability in a driver or any other -MongoDB project, please report it according to the `instructions here -`_. - -Installation -============ - -PyMongo can be installed with `pip `_:: - - $ python -m pip install pymongo - -Or ``easy_install`` from -`setuptools `_:: - - $ python -m easy_install pymongo - -You can also download the project source and do:: - - $ pip install . - -Do **not** install the "bson" package from pypi. PyMongo comes with its own -bson package; doing "easy_install bson" installs a third-party package that -is incompatible with PyMongo. - -Dependencies -============ - -PyMongo supports CPython 3.7+ and PyPy3.7+. - -Required dependencies: - -Support for mongodb+srv:// URIs requires `dnspython -`_ - -Optional dependencies: - -GSSAPI authentication requires `pykerberos -`_ on Unix or `WinKerberos -`_ on Windows. The correct -dependency can be installed automatically along with PyMongo:: - - $ python -m pip install "pymongo[gssapi]" - -MONGODB-AWS authentication requires `pymongo-auth-aws -`_:: - - $ python -m pip install "pymongo[aws]" - -OCSP (Online Certificate Status Protocol) requires `PyOpenSSL -`_, `requests -`_, `service_identity -`_ and may -require `certifi -`_:: - - $ python -m pip install "pymongo[ocsp]" - -Wire protocol compression with snappy requires `python-snappy -`_:: - - $ python -m pip install "pymongo[snappy]" - -Wire protocol compression with zstandard requires `zstandard -`_:: - - $ python -m pip install "pymongo[zstd]" - -Client-Side Field Level Encryption requires `pymongocrypt -`_ and -`pymongo-auth-aws `_:: - - $ python -m pip install "pymongo[encryption]" - -You can install all dependencies automatically with the following -command:: - - $ python -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]" - -Additional dependencies are: - -- (to generate documentation or run tests) tox_ - -Examples -======== -Here's a basic example (for more see the *examples* section of the docs): - -.. code-block:: pycon - - >>> import pymongo - >>> client = pymongo.MongoClient("localhost", 27017) - >>> db = client.test - >>> db.name - 'test' - >>> db.my_collection - Collection(Database(MongoClient('localhost', 27017), 'test'), 'my_collection') - >>> db.my_collection.insert_one({"x": 10}).inserted_id - ObjectId('4aba15ebe23f6b53b0000000') - >>> db.my_collection.insert_one({"x": 8}).inserted_id - ObjectId('4aba160ee23f6b543e000000') - >>> db.my_collection.insert_one({"x": 11}).inserted_id - ObjectId('4aba160ee23f6b543e000002') - >>> db.my_collection.find_one() - {'x': 10, '_id': ObjectId('4aba15ebe23f6b53b0000000')} - >>> for item in db.my_collection.find(): - ... print(item["x"]) - ... - 10 - 8 - 11 - >>> db.my_collection.create_index("x") - 'x_1' - >>> for item in db.my_collection.find().sort("x", pymongo.ASCENDING): - ... print(item["x"]) - ... - 8 - 10 - 11 - >>> [item["x"] for item in db.my_collection.find().limit(2).skip(1)] - [8, 11] - -Documentation -============= - -Documentation is available at `pymongo.readthedocs.io `_. - -Documentation can be generated by running **tox -m doc**. Generated documentation can be found in the -*doc/build/html/* directory. - -Learning Resources -================== - -MongoDB Learn - `Python courses `_. -`Python Articles on Developer Center `_. - -Testing -======= - -The easiest way to run the tests is to run **tox -m test** in -the root of the distribution. 
- -To verify that PyMongo works with Gevent's monkey-patching:: - - $ python green_framework_test.py gevent - -Or with Eventlet's:: - - $ python green_framework_test.py eventlet - -.. _tox: https://tox.wiki/en/latest/index.html diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 0000000000..05d7c8d63d --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,89 @@ +# Some notes on PyMongo releases + +## Versioning + +We follow [semver](https://semver.org/) and [pep-0440](https://www.python.org/dev/peps/pep-0440) +for versioning. + +We shoot for a release every few months - that will generally just +increment the middle / minor version number (e.g. `3.5.0` -> `3.6.0`). + +Patch releases are reserved for bug fixes (in general no new features or +deprecations) - they only happen in cases where there is a critical bug +in a recently released version, or when a release has no new features or +API changes. + +In between releases we add `.devN` to the version number to denote the +version under development. So if we just released `3.6.0`, then the +current dev version might be `3.6.1.dev0` or `3.7.0.dev0`. When we make the +next release we replace all instances of `3.x.x.devN` in the docs with the +new version number. + +## Deprecation + +Changes should be backwards compatible unless absolutely necessary. When +making API changes the approach is generally to add a deprecation +warning but keeping the existing API functional. Deprecated features can +be removed in a release that changes the major version number. + +## Doing a Release + +1. PyMongo is tested on Evergreen. Ensure the latest commit are passing + [CI](https://spruce.mongodb.com/commits/mongo-python-driver) as expected. + +2. Check Jira to ensure all the tickets in this version have been + completed. + +3. Add release notes to `doc/changelog.rst`. Generally just + summarize/clarify the git log, but you might add some more long form + notes for big changes. + +4. Make sure version number is updated in `pymongo/_version.py` + +5. Commit with a BUMP version_number message, eg + `git commit -m 'BUMP 3.11.0'`. + +6. Tag w/ version_number, eg, + `git tag -a '3.11.0' -m 'BUMP 3.11.0' `. + +7. Bump the version number to `.dev0` in + `pymongo/_version.py`, commit, push. + +8. Push commit / tag, eg `git push && git push --tags`. + +9. Pushing a tag will trigger a release process in Evergreen which + builds wheels for manylinux, macOS, and Windows. Wait for the + "release-combine" task to complete and then download the "Release + files all" archive. See https://spruce.mongodb.com/commits/mongo-python-driver?buildVariants=release&view=ALL + + The contents should look like this: + + $ ls path/to/archive + pymongo--cp310-cp310-macosx_10_9_universal2.whl + ... + pymongo--cp38-cp38-manylinux2014_x86_64.whl + ... + pymongo--cp38-cp38-win_amd64.whl + ... + pymongo-.tar.gz + +10. Upload all the release packages to PyPI with twine: + + $ python3 -m twine upload path/to/archive/* + +11. Make sure the new version appears on + `https://pymongo.readthedocs.io/en/stable/`. If the new version does not show + up automatically, trigger a rebuild of "latest" on https://readthedocs.org/projects/pymongo/builds/. + +12. Publish the release version in Jira and add a description of the release, such as a the reason + or the main feature. + +13. Announce the release on the [community forum](https://www.mongodb.com/community/forums/tags/c/announcements/driver-releases/110/python) + +14. 
File a ticket for DOCSP highlighting changes in server version and + Python version compatibility or the lack thereof, for example https://jira.mongodb.org/browse/DOCSP-34040 + +15. Create a GitHub Release for the tag using https://github.com/mongodb/mongo-python-driver/releases/new. + The title should be "PyMongo X.Y.Z", and the description should + contain a link to the release notes on the the community forum, e.g. + "Release notes: mongodb.com/community/forums/t/pymongo-4-0-2-released/150457" diff --git a/RELEASE.rst b/RELEASE.rst deleted file mode 100644 index cabddef84c..0000000000 --- a/RELEASE.rst +++ /dev/null @@ -1,90 +0,0 @@ -Some notes on PyMongo releases -============================== - -Versioning ----------- - -We shoot for a release every few months - that will generally just -increment the middle / minor version number (e.g. 3.5.0 -> 3.6.0). - -Patch releases are reserved for bug fixes (in general no new features -or deprecations) - they only happen in cases where there is a critical -bug in a recently released version, or when a release has no new -features or API changes. - -In between releases we add .devN to the version number to denote the version -under development. So if we just released 3.6.0, then the current dev -version might be 3.6.1.dev0 or 3.7.0.dev0. When we make the next release we -replace all instances of 3.x.x.devN in the docs with the new version number. - -https://semver.org/ -https://www.python.org/dev/peps/pep-0440/ - -Deprecation ------------ - -Changes should be backwards compatible unless absolutely necessary. When making -API changes the approach is generally to add a deprecation warning but keeping -the existing API functional. Deprecated features can be removed in a release -that changes the major version number. - -Doing a Release ---------------- - -1. PyMongo is tested on Evergreen. Ensure the latest commit are passing CI - as expected: https://evergreen.mongodb.com/waterfall/mongo-python-driver. - -2. Check Jira to ensure all the tickets in this version have been completed. - -3. Add release notes to doc/changelog.rst. Generally just summarize/clarify - the git log, but you might add some more long form notes for big changes. - -4. Make sure version number is updated in ``pymongo/_version.py`` - -5. Commit with a BUMP version_number message, eg ``git commit -m 'BUMP 3.11.0'``. - -6. Tag w/ version_number, eg, ``git tag -a '3.11.0' -m 'BUMP 3.11.0' ``. - -7. Push commit / tag, eg ``git push && git push --tags``. - -8. Pushing a tag will trigger a release process in Evergreen which builds - wheels for manylinux, macOS, and Windows. Wait for the "release-combine" - task to complete and then download the "Release files all" archive. See: - https://evergreen.mongodb.com/waterfall/mongo-python-driver?bv_filter=release - - The contents should look like this:: - - $ ls path/to/archive - pymongo--cp310-cp310-macosx_10_9_universal2.whl - ... - pymongo--cp38-cp38-manylinux2014_x86_64.whl - ... - pymongo--cp38-cp38-win_amd64.whl - ... - pymongo-.tar.gz - -9. Upload all the release packages to PyPI with twine:: - - $ python3 -m twine upload path/to/archive/* - -10. Make sure the new version appears on https://pymongo.readthedocs.io/. If the - new version does not show up automatically, trigger a rebuild of "latest": - https://readthedocs.org/projects/pymongo/builds/ - -11. Bump the version number to .dev0 in ``pymongo/_version.py``, - commit, push. - -12. Publish the release version in Jira. - -13. 
Announce the release on: - https://www.mongodb.com/community/forums/tags/c/announcements/driver-releases/110/python - -14. File a ticket for DOCSP highlighting changes in server version and Python - version compatibility or the lack thereof, for example: - https://jira.mongodb.org/browse/DOCSP-34040 - -15. Create a GitHub Release for the tag using - https://github.com/mongodb/mongo-python-driver/releases/new. - The title should be "PyMongo X.Y.Z", and the description should contain - a link to the release notes on the the community forum, e.g. - "Release notes: mongodb.com/community/forums/t/pymongo-4-0-2-released/150457." diff --git a/pyproject.toml b/pyproject.toml index f70684ede7..db2c956690 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta" name = "pymongo" dynamic = ["version"] description = "Python driver for MongoDB " -readme = "README.rst" +readme = "README.md" license = {file="LICENSE"} requires-python = ">=3.7" authors = [ From 6537415da70c2a15cad52ebe7669673947ace65a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 27 Nov 2023 09:24:00 -0600 Subject: [PATCH 019/221] PYTHON-3605 Move type annotations to parameter list in rendered docs (#1441) --- CONTRIBUTING.md | 2 + bson/__init__.py | 50 ++- bson/binary.py | 13 +- bson/code.py | 7 +- bson/codec_options.py | 20 +- bson/datetime_ms.py | 6 +- bson/dbref.py | 9 +- bson/decimal128.py | 9 +- bson/int64.py | 3 +- bson/json_util.py | 29 +- bson/objectid.py | 12 +- bson/raw_bson.py | 10 +- bson/regex.py | 8 +- bson/timestamp.py | 5 +- doc/conf.py | 2 + gridfs/__init__.py | 181 +++++------ gridfs/grid_file.py | 30 +- pymongo/__init__.py | 6 +- pymongo/bulk.py | 8 +- pymongo/change_stream.py | 3 +- pymongo/client_session.py | 37 +-- pymongo/collation.py | 21 +- pymongo/collection.py | 531 +++++++++++++++----------------- pymongo/command_cursor.py | 6 +- pymongo/common.py | 5 +- pymongo/cursor.py | 50 +-- pymongo/database.py | 204 ++++++------ pymongo/encryption.py | 192 +++++------- pymongo/encryption_options.py | 38 ++- pymongo/message.py | 17 +- pymongo/mongo_client.py | 191 +++++------- pymongo/monitoring.py | 275 +++++++---------- pymongo/network.py | 43 ++- pymongo/operations.py | 64 ++-- pymongo/periodic_executor.py | 9 +- pymongo/pool.py | 63 ++-- pymongo/read_concern.py | 3 +- pymongo/read_preferences.py | 31 +- pymongo/response.py | 28 +- pymongo/results.py | 5 +- pymongo/saslprep.py | 8 +- pymongo/server.py | 14 +- pymongo/server_api.py | 12 +- pymongo/server_description.py | 11 +- pymongo/topology.py | 12 +- pymongo/topology_description.py | 33 +- pymongo/uri_parser.py | 59 ++-- pymongo/write_concern.py | 9 +- test/test_comment.py | 2 +- 49 files changed, 1026 insertions(+), 1360 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index bf25f0af49..0a2122f699 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -172,6 +172,8 @@ request](https://help.github.com/articles/using-pull-requests). You might also use the GitHub [Edit](https://github.com/blog/844-forking-with-the-edit-button) button. +We use the [Sphinx docstring format](https://sphinx-rtd-tutorial.readthedocs.io/en/latest/docstrings.html). + You can build the documentation locally by running: ```bash diff --git a/bson/__init__.py b/bson/__init__.py index 2c4bd3a8b2..66517fc075 100644 --- a/bson/__init__.py +++ b/bson/__init__.py @@ -1026,12 +1026,11 @@ def encode( :class:`~bson.errors.InvalidDocument` if `document` cannot be converted to :class:`BSON`. 
- :Parameters: - - `document`: mapping type representing a document - - `check_keys` (optional): check if keys start with '$' or + :param document: mapping type representing a document + :param check_keys: check if keys start with '$' or contain '.', raising :class:`~bson.errors.InvalidDocument` in either case - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionadded:: 3.9 @@ -1072,10 +1071,9 @@ def decode( >>> type(decoded_doc) - :Parameters: - - `data`: the BSON to decode. Any bytes-like object that implements + :param data: the BSON to decode. Any bytes-like object that implements the buffer protocol. - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionadded:: 3.9 @@ -1141,9 +1139,8 @@ def decode_all( `data` must be a bytes-like object implementing the buffer protocol that provides concatenated, valid, BSON-encoded documents. - :Parameters: - - `data`: BSON data - - `codec_options` (optional): An instance of + :param data: BSON data + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.9 @@ -1238,20 +1235,18 @@ def _decode_all_selective( `data` must be a string representing a valid, BSON-encoded document. - :Parameters: - - `data`: BSON data - - `codec_options`: An instance of + :param data: BSON data + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions` with user-specified type decoders. If no decoders are found, this method is the same as ``decode_all``. - - `fields`: Map of document namespaces where data that needs + :param fields: Map of document namespaces where data that needs to be custom decoded lives or None. For example, to custom decode a list of objects in 'field1.subfield1', the specified value should be ``{'field1': {'subfield1': 1}}``. If ``fields`` is an empty map or None, this method is the same as ``decode_all``. - :Returns: - - `document_list`: Single-member list containing the decoded document. + :return: Single-member list containing the decoded document. .. versionadded:: 3.8 """ @@ -1298,9 +1293,8 @@ def decode_iter( `data` must be a string of concatenated, valid, BSON-encoded documents. - :Parameters: - - `data`: BSON data - - `codec_options` (optional): An instance of + :param data: BSON data + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.0 @@ -1346,9 +1340,8 @@ def decode_file_iter( Works similarly to the decode_all function, but reads from the file object in chunks and parses bson in chunks, yielding one document at a time. - :Parameters: - - `file_obj`: A file object containing BSON data. - - `codec_options` (optional): An instance of + :param file_obj: A file object containing BSON data. + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.0 @@ -1377,8 +1370,7 @@ def is_valid(bson: bytes) -> bool: :class:`bytes`. Returns ``True`` if `bson` is valid :class:`BSON`, ``False`` otherwise. - :Parameters: - - `bson`: the data to be validated + :param bson: the data to be validated """ if not isinstance(bson, bytes): raise TypeError("BSON data must be an instance of a subclass of bytes") @@ -1414,12 +1406,11 @@ def encode( :class:`str'. Raises :class:`~bson.errors.InvalidDocument` if `document` cannot be converted to :class:`BSON`. 
- :Parameters: - - `document`: mapping type representing a document - - `check_keys` (optional): check if keys start with '$' or + :param document: mapping type representing a document + :param check_keys: check if keys start with '$' or contain '.', raising :class:`~bson.errors.InvalidDocument` in either case - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.0 @@ -1447,8 +1438,7 @@ def decode( # type:ignore[override] >>> type(decoded_doc) - :Parameters: - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. .. versionchanged:: 3.0 diff --git a/bson/binary.py b/bson/binary.py index a4cd44e930..be33464462 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -211,10 +211,9 @@ class Binary(bytes): .. note:: Instances of Binary with subtype 0 will be decoded directly to :class:`bytes`. - :Parameters: - - `data`: the binary data to represent. Can be any bytes-like type + :param data: the binary data to represent. Can be any bytes-like type that implements the buffer protocol. - - `subtype` (optional): the `binary subtype + :param subtype: the `binary subtype `_ to use @@ -253,9 +252,8 @@ def from_uuid( Raises :exc:`TypeError` if `uuid` is not an instance of :class:`~uuid.UUID`. - :Parameters: - - `uuid`: A :class:`uuid.UUID` instance. - - `uuid_representation`: A member of + :param uuid: A :class:`uuid.UUID` instance. + :param uuid_representation: A member of :class:`~bson.binary.UuidRepresentation`. Default: :const:`~bson.binary.UuidRepresentation.STANDARD`. See :ref:`handling-uuid-data-example` for details. @@ -304,8 +302,7 @@ def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUI Raises :exc:`ValueError` if this :class:`~bson.binary.Binary` instance does not contain a UUID. - :Parameters: - - `uuid_representation`: A member of + :param uuid_representation: A member of :class:`~bson.binary.UuidRepresentation`. Default: :const:`~bson.binary.UuidRepresentation.STANDARD`. See :ref:`handling-uuid-data-example` for details. diff --git a/bson/code.py b/bson/code.py index 689cda4acd..6b4541d0ff 100644 --- a/bson/code.py +++ b/bson/code.py @@ -31,15 +31,14 @@ class Code(str): keyword argument it will override any setting for that variable in the `scope` dictionary. - :Parameters: - - `code`: A string containing JavaScript code to be evaluated or another + :param code: A string containing JavaScript code to be evaluated or another instance of Code. In the latter case, the scope of `code` becomes this Code's :attr:`scope`. - - `scope` (optional): dictionary representing the scope in which + :param scope: dictionary representing the scope in which `code` should be evaluated - a mapping from identifiers (as strings) to values. Defaults to ``None``. This is applied after any scope associated with a given `code` above. - - `**kwargs` (optional): scope variables can also be passed as + :param kwargs: scope variables can also be passed as keyword arguments. These are applied after `scope` and `code`. .. versionchanged:: 3.4 diff --git a/bson/codec_options.py b/bson/codec_options.py index 2c64c64600..3a0b83b7be 100644 --- a/bson/codec_options.py +++ b/bson/codec_options.py @@ -120,14 +120,13 @@ class TypeRegistry: See :ref:`custom-type-type-registry` documentation for an example. - :Parameters: - - `type_codecs` (optional): iterable of type codec instances. 
If + :param type_codecs: iterable of type codec instances. If ``type_codecs`` contains multiple codecs that transform a single python or BSON type, the transformation specified by the type codec occurring last prevails. A TypeError will be raised if one or more type codecs modify the encoding behavior of a built-in :mod:`bson` type. - - `fallback_encoder` (optional): callable that accepts a single, + :param fallback_encoder: callable that accepts a single, unencodable python value and transforms it into a type that :mod:`bson` can encode. See :ref:`fallback-encoder-callable` documentation for an example. @@ -324,30 +323,29 @@ def __init__(self, *args, **kwargs): See :doc:`/examples/uuid` for examples using the `uuid_representation` option. - :Parameters: - - `document_class`: BSON documents returned in queries will be decoded + :param document_class: BSON documents returned in queries will be decoded to an instance of this class. Must be a subclass of :class:`~collections.abc.MutableMapping`. Defaults to :class:`dict`. - - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone + :param tz_aware: If ``True``, BSON datetimes will be decoded to timezone aware instances of :class:`~datetime.datetime`. Otherwise they will be naive. Defaults to ``False``. - - `uuid_representation`: The BSON representation to use when encoding + :param uuid_representation: The BSON representation to use when encoding and decoding instances of :class:`~uuid.UUID`. Defaults to :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`. New applications should consider setting this to :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language compatibility. See :ref:`handling-uuid-data-example` for details. - - `unicode_decode_error_handler`: The error handler to apply when + :param unicode_decode_error_handler: The error handler to apply when a Unicode-related error occurs during BSON decoding that would otherwise raise :exc:`UnicodeDecodeError`. Valid options include 'strict', 'replace', 'backslashreplace', 'surrogateescape', and 'ignore'. Defaults to 'strict'. - - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the + :param tzinfo: A :class:`~datetime.tzinfo` subclass that specifies the timezone to/from which :class:`~datetime.datetime` objects should be encoded/decoded. - - `type_registry`: Instance of :class:`TypeRegistry` used to customize + :param type_registry: Instance of :class:`TypeRegistry` used to customize encoding and decoding behavior. - - `datetime_conversion`: Specifies how UTC datetimes should be decoded + :param datetime_conversion: Specifies how UTC datetimes should be decoded within BSON. Valid options include 'datetime_ms' to return as a DatetimeMS, 'datetime' to return as a datetime.datetime and raising a ValueError for out-of-range values, 'datetime_auto' to diff --git a/bson/datetime_ms.py b/bson/datetime_ms.py index b6aebd05d0..160f63faa1 100644 --- a/bson/datetime_ms.py +++ b/bson/datetime_ms.py @@ -55,8 +55,7 @@ def __init__(self, value: Union[int, datetime.datetime]): 'datetime_auto'. See :ref:`handling-out-of-range-datetimes` for details. - :Parameters: - - `value`: An instance of :class:`datetime.datetime` to be + :param value: An instance of :class:`datetime.datetime` to be represented as milliseconds since the Unix epoch, or int of milliseconds since the Unix epoch. """ @@ -104,8 +103,7 @@ def as_datetime( ) -> datetime.datetime: """Create a Python :class:`~datetime.datetime` from this DatetimeMS object. 
- :Parameters: - - `codec_options`: A CodecOptions instance for specifying how the + :param codec_options: A CodecOptions instance for specifying how the resulting DatetimeMS object will be formatted using ``tz_aware`` and ``tz_info``. Defaults to :const:`~bson.codec_options.DEFAULT_CODEC_OPTIONS`. diff --git a/bson/dbref.py b/bson/dbref.py index 50fcf6c02f..6c21b8162c 100644 --- a/bson/dbref.py +++ b/bson/dbref.py @@ -47,11 +47,10 @@ def __init__( keyword arguments will create additional fields in the resultant embedded document. - :Parameters: - - `collection`: name of the collection the document is stored in - - `id`: the value of the document's ``"_id"`` field - - `database` (optional): name of the database to reference - - `**kwargs` (optional): additional keyword arguments will + :param collection: name of the collection the document is stored in + :param id: the value of the document's ``"_id"`` field + :param database: name of the database to reference + :param kwargs: additional keyword arguments will create additional, custom fields .. seealso:: The MongoDB documentation on `dbrefs `_. diff --git a/bson/decimal128.py b/bson/decimal128.py index f807452a6c..8581d5a3c8 100644 --- a/bson/decimal128.py +++ b/bson/decimal128.py @@ -70,8 +70,7 @@ def create_decimal128_context() -> decimal.Context: def _decimal_to_128(value: _VALUE_OPTIONS) -> Tuple[int, int]: """Converts a decimal.Decimal to BID (high bits, low bits). - :Parameters: - - `value`: An instance of decimal.Decimal + :param value: An instance of decimal.Decimal """ with decimal.localcontext(_DEC128_CTX) as ctx: value = ctx.create_decimal(value) @@ -126,8 +125,7 @@ class Decimal128: >>> Decimal128((3474527112516337664, 5)) Decimal128('0.0005') - :Parameters: - - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of + :param value: An instance of :class:`decimal.Decimal`, string, or tuple of (high bits, low bits) from Binary Integer Decimal (BID) format. .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context` @@ -275,8 +273,7 @@ def from_bid(cls: Type[Decimal128], value: bytes) -> Decimal128: """Create an instance of :class:`Decimal128` from Binary Integer Decimal string. - :Parameters: - - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating + :param value: 16 byte string (128-bit IEEE 754-2008 decimal floating point in Binary Integer Decimal (BID) format). """ if not isinstance(value, bytes): diff --git a/bson/int64.py b/bson/int64.py index c0676839ab..5846504a2d 100644 --- a/bson/int64.py +++ b/bson/int64.py @@ -25,8 +25,7 @@ class Int64(int): Python 3. Small integral numbers are encoded to BSON int32 by default, but Int64 numbers will always be encoded to BSON int64. - :Parameters: - - `value`: the numeric value to represent + :param value: the numeric value to represent """ __slots__ = () diff --git a/bson/json_util.py b/bson/json_util.py index 1a74a81368..3ef15fd2cb 100644 --- a/bson/json_util.py +++ b/bson/json_util.py @@ -246,33 +246,32 @@ class JSONOptions(_BASE_CLASS): def __init__(self, *args: Any, **kwargs: Any): """Encapsulates JSON options for :func:`dumps` and :func:`loads`. - :Parameters: - - `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects + :param strict_number_long: If ``True``, :class:`~bson.int64.Int64` objects are encoded to MongoDB Extended JSON's *Strict mode* type `NumberLong`, ie ``'{"$numberLong": "" }'``. Otherwise they will be encoded as an `int`. Defaults to ``False``. 
- - `datetime_representation`: The representation to use when encoding + :param datetime_representation: The representation to use when encoding instances of :class:`datetime.datetime`. Defaults to :const:`~DatetimeRepresentation.LEGACY`. - - `strict_uuid`: If ``True``, :class:`uuid.UUID` object are encoded to + :param strict_uuid: If ``True``, :class:`uuid.UUID` object are encoded to MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it will be encoded as ``'{"$uuid": "" }'``. Defaults to ``False``. - - `json_mode`: The :class:`JSONMode` to use when encoding BSON types to + :param json_mode: The :class:`JSONMode` to use when encoding BSON types to Extended JSON. Defaults to :const:`~JSONMode.LEGACY`. - - `document_class`: BSON documents returned by :func:`loads` will be + :param document_class: BSON documents returned by :func:`loads` will be decoded to an instance of this class. Must be a subclass of :class:`collections.MutableMapping`. Defaults to :class:`dict`. - - `uuid_representation`: The :class:`~bson.binary.UuidRepresentation` + :param uuid_representation: The :class:`~bson.binary.UuidRepresentation` to use when encoding and decoding instances of :class:`uuid.UUID`. Defaults to :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. - - `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type + :param tz_aware: If ``True``, MongoDB Extended JSON's *Strict mode* type `Date` will be decoded to timezone aware instances of :class:`datetime.datetime`. Otherwise they will be naive. Defaults to ``False``. - - `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the + :param tzinfo: A :class:`datetime.tzinfo` subclass that specifies the timezone from which :class:`~datetime.datetime` objects should be decoded. Defaults to :const:`~bson.tz_util.utc`. - - `datetime_conversion`: Specifies how UTC datetimes should be decoded + :param datetime_conversion: Specifies how UTC datetimes should be decoded within BSON. Valid options include 'datetime_ms' to return as a DatetimeMS, 'datetime' to return as a datetime.datetime and raising a ValueError for out-of-range values, 'datetime_auto' to @@ -280,8 +279,8 @@ def __init__(self, *args: Any, **kwargs: Any): out-of-range and 'datetime_clamp' to clamp to the minimum and maximum possible datetimes. Defaults to 'datetime'. See :ref:`handling-out-of-range-datetimes` for details. - - `args`: arguments to :class:`~bson.codec_options.CodecOptions` - - `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions` + :param args: arguments to :class:`~bson.codec_options.CodecOptions` + :param kwargs: arguments to :class:`~bson.codec_options.CodecOptions` .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_. @@ -456,8 +455,7 @@ def dumps(obj: Any, *args: Any, **kwargs: Any) -> str: Recursive function that handles all BSON types including :class:`~bson.binary.Binary` and :class:`~bson.code.Code`. - :Parameters: - - `json_options`: A :class:`JSONOptions` instance used to modify the + :param json_options: A :class:`JSONOptions` instance used to modify the encoding of MongoDB Extended JSON types. Defaults to :const:`DEFAULT_JSON_OPTIONS`. @@ -480,8 +478,7 @@ def loads(s: Union[str, bytes, bytearray], *args: Any, **kwargs: Any) -> Any: Raises ``TypeError``, ``ValueError``, ``KeyError``, or :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON. 
- :Parameters: - - `json_options`: A :class:`JSONOptions` instance used to modify the + :param json_options: A :class:`JSONOptions` instance used to modify the decoding of MongoDB Extended JSON types. Defaults to :const:`DEFAULT_JSON_OPTIONS`. diff --git a/bson/objectid.py b/bson/objectid.py index 2a3d9ebf5b..57efdc7983 100644 --- a/bson/objectid.py +++ b/bson/objectid.py @@ -84,8 +84,7 @@ def __init__(self, oid: Optional[Union[str, ObjectId, bytes]] = None) -> None: Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor 24 hex digits, or :class:`TypeError` if `oid` is not an accepted type. - :Parameters: - - `oid` (optional): a valid ObjectId. + :param oid: a valid ObjectId. .. seealso:: The MongoDB documentation on `ObjectIds `_. @@ -126,8 +125,7 @@ def from_datetime(cls: Type[ObjectId], generation_time: datetime.datetime) -> Ob >>> dummy_id = ObjectId.from_datetime(gen_time) >>> result = collection.find({"_id": {"$lt": dummy_id}}) - :Parameters: - - `generation_time`: :class:`~datetime.datetime` to be used + :param generation_time: :class:`~datetime.datetime` to be used as the generation time for the resulting ObjectId. """ offset = generation_time.utcoffset() @@ -141,8 +139,7 @@ def from_datetime(cls: Type[ObjectId], generation_time: datetime.datetime) -> Ob def is_valid(cls: Type[ObjectId], oid: Any) -> bool: """Checks if a `oid` string is valid or not. - :Parameters: - - `oid`: the object id to validate + :param oid: the object id to validate .. versionadded:: 2.3 """ @@ -186,8 +183,7 @@ def __validate(self, oid: Any) -> None: :class:`bytes`, or ObjectId. Raises InvalidId if it is not a valid ObjectId. - :Parameters: - - `oid`: a valid ObjectId + :param oid: a valid ObjectId """ if isinstance(oid, ObjectId): self.__id = oid.binary diff --git a/bson/raw_bson.py b/bson/raw_bson.py index 50362398a3..8bada7d1d5 100644 --- a/bson/raw_bson.py +++ b/bson/raw_bson.py @@ -65,9 +65,8 @@ def _inflate_bson( ) -> MutableMapping[str, Any]: """Inflates the top level fields of a BSON document. - :Parameters: - - `bson_bytes`: the BSON bytes that compose this document - - `codec_options`: An instance of + :param bson_bytes: the BSON bytes that compose this document + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions` whose ``document_class`` must be :class:`RawBSONDocument`. """ @@ -110,9 +109,8 @@ class from the standard library so it can be used like a read-only >>> raw_doc['_id'] 'my_doc' - :Parameters: - - `bson_bytes`: the BSON bytes that compose this document - - `codec_options` (optional): An instance of + :param bson_bytes: the BSON bytes that compose this document + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions` whose ``document_class`` must be :class:`RawBSONDocument`. The default is :attr:`DEFAULT_RAW_BSON_OPTIONS`. diff --git a/bson/regex.py b/bson/regex.py index e3ca1ab69f..60cff4fd08 100644 --- a/bson/regex.py +++ b/bson/regex.py @@ -66,8 +66,7 @@ def from_native(cls: Type[Regex[Any]], regex: Pattern[_T]) -> Regex[_T]: >>> regex.flags ^= re.UNICODE >>> db.collection.insert_one({'pattern': regex}) - :Parameters: - - `regex`: A regular expression object from ``re.compile()``. + :param regex: A regular expression object from ``re.compile()``. .. 
warning:: Python regular expressions use a different syntax and different @@ -89,9 +88,8 @@ def __init__(self, pattern: _T, flags: Union[str, int] = 0) -> None: This class is useful to store and retrieve regular expressions that are incompatible with Python's regular expression dialect. - :Parameters: - - `pattern`: string - - `flags`: (optional) an integer bitmask, or a string of flag + :param pattern: string + :param flags: an integer bitmask, or a string of flag characters like "im" for IGNORECASE and MULTILINE """ if not isinstance(pattern, (str, bytes)): diff --git a/bson/timestamp.py b/bson/timestamp.py index 9bc6a715b6..3e76e7baad 100644 --- a/bson/timestamp.py +++ b/bson/timestamp.py @@ -47,11 +47,10 @@ def __init__(self, time: Union[datetime.datetime, int], inc: int) -> None: an instance of :class:`int`. Raises :class:`ValueError` if `time` or `inc` is not in [0, 2**32). - :Parameters: - - `time`: time in seconds since epoch UTC, or a naive UTC + :param time: time in seconds since epoch UTC, or a naive UTC :class:`~datetime.datetime`, or an aware :class:`~datetime.datetime` - - `inc`: the incrementing counter + :param inc: the incrementing counter """ if isinstance(time, datetime.datetime): offset = time.utcoffset() diff --git a/doc/conf.py b/doc/conf.py index 1ea51add88..f0d9f921bb 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -94,6 +94,8 @@ # -- Options for extensions ---------------------------------------------------- autoclass_content = "init" +autodoc_typehints = "description" + doctest_path = [Path("..").resolve()] doctest_test_doctest_blocks = "" diff --git a/gridfs/__init__.py b/gridfs/__init__.py index 63aa40623a..8d01fefce8 100644 --- a/gridfs/__init__.py +++ b/gridfs/__init__.py @@ -63,9 +63,8 @@ def __init__(self, database: Database, collection: str = "fs"): Raises :class:`TypeError` if `database` is not an instance of :class:`~pymongo.database.Database`. - :Parameters: - - `database`: database to use - - `collection` (optional): root collection to use + :param database: database to use + :param collection: root collection to use .. versionchanged:: 4.0 Removed the `disable_md5` parameter. See @@ -110,8 +109,7 @@ def new_file(self, **kwargs: Any) -> GridIn: not already exist in GridFS. Otherwise :class:`~gridfs.errors.FileExists` is raised. - :Parameters: - - `**kwargs` (optional): keyword arguments for file creation + :param kwargs: keyword arguments for file creation """ return GridIn(self.__collection, **kwargs) @@ -135,9 +133,8 @@ def put(self, data: Any, **kwargs: Any) -> Any: not already exist in GridFS. Otherwise :class:`~gridfs.errors.FileExists` is raised. - :Parameters: - - `data`: data to be written as a file. - - `**kwargs` (optional): keyword arguments for file creation + :param data: data to be written as a file. + :param kwargs: keyword arguments for file creation .. versionchanged:: 3.0 w=0 writes to GridFS are now prohibited. @@ -152,9 +149,8 @@ def get(self, file_id: Any, session: Optional[ClientSession] = None) -> GridOut: Returns an instance of :class:`~gridfs.grid_file.GridOut`, which provides a file-like interface for reading. - :Parameters: - - `file_id`: ``"_id"`` of the file to get - - `session` (optional): a + :param file_id: ``"_id"`` of the file to get + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -193,13 +189,12 @@ def get_version( Raises :class:`~gridfs.errors.NoFile` if no such version of that file exists. 
- :Parameters: - - `filename`: ``"filename"`` of the file to get, or `None` - - `version` (optional): version of the file to get (defaults + :param filename: ``"filename"`` of the file to get, or `None` + :param version: version of the file to get (defaults to -1, the most recent version uploaded) - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` - - `**kwargs` (optional): find files by custom metadata. + :param kwargs: find files by custom metadata. .. versionchanged:: 3.6 Added ``session`` parameter. @@ -235,11 +230,10 @@ def get_last_version( Equivalent to calling :meth:`get_version` with the default `version` (``-1``). - :Parameters: - - `filename`: ``"filename"`` of the file to get, or `None` - - `session` (optional): a + :param filename: ``"filename"`` of the file to get, or `None` + :param session: a :class:`~pymongo.client_session.ClientSession` - - `**kwargs` (optional): find files by custom metadata. + :param kwargs: find files by custom metadata. .. versionchanged:: 3.6 Added ``session`` parameter. @@ -261,9 +255,8 @@ def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None: .. note:: Deletes of non-existent files are considered successful since the end result is the same: no file with that _id remains. - :Parameters: - - `file_id`: ``"_id"`` of the file to delete - - `session` (optional): a + :param file_id: ``"_id"`` of the file to delete + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -280,8 +273,7 @@ def list(self, session: Optional[ClientSession] = None) -> list[str]: """List the names of all files stored in this instance of :class:`GridFS`. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -309,19 +301,20 @@ def find_one( All arguments to :meth:`find` are also valid arguments for :meth:`find_one`, although any `limit` argument will be ignored. Returns a single :class:`~gridfs.grid_file.GridOut`, - or ``None`` if no matching file is found. For example:: + or ``None`` if no matching file is found. For example: + + .. code-block: python file = fs.find_one({"filename": "lisa.txt"}) - :Parameters: - - `filter` (optional): a dictionary specifying + :param filter: a dictionary specifying the query to be performing OR any other type to be used as the value for a query for ``"_id"`` in the file collection. - - `*args` (optional): any additional positional arguments are + :param args: any additional positional arguments are the same as the arguments to :meth:`find`. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` - - `**kwargs` (optional): any additional keyword arguments + :param kwargs: any additional keyword arguments are the same as the arguments to :meth:`find`. .. versionchanged:: 3.6 @@ -367,20 +360,19 @@ def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances are associated with that session. - :Parameters: - - `filter` (optional): A query document that selects which files + :param filter: A query document that selects which files to include in the result set. Can be an empty document to include all files. 
- - `skip` (optional): the number of files to omit (from + :param skip: the number of files to omit (from the start of the result set) when returning the results - - `limit` (optional): the maximum number of results to + :param limit: the maximum number of results to return - - `no_cursor_timeout` (optional): if False (the default), any + :param no_cursor_timeout: if False (the default), any returned cursor is closed by the server after 10 minutes of inactivity. If set to True, the returned cursor will never time out on the server. Care should be taken to ensure that cursors with no_cursor_timeout turned on are properly closed. - - `sort` (optional): a list of (key, direction) pairs + :param sort: a list of (key, direction) pairs specifying the sort order for this query. See :meth:`~pymongo.cursor.Cursor.sort` for details. @@ -429,12 +421,11 @@ def exists( create appropriate indexes; application developers should be sure to create indexes if needed and as appropriate. - :Parameters: - - `document_or_id` (optional): query document, or _id of the + :param document_or_id: query document, or _id of the document to check for - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` - - `**kwargs` (optional): keyword arguments are used as a + :param kwargs: keyword arguments are used as a query document, if they're present. .. versionchanged:: 3.6 @@ -468,15 +459,14 @@ def __init__( Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` is not acknowledged. - :Parameters: - - `database`: database to use. - - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'. - - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults + :param database: database to use. + :param bucket_name: The name of the bucket. Defaults to 'fs'. + :param chunk_size_bytes: The chunk size in bytes. Defaults to 255KB. - - `write_concern` (optional): The + :param write_concern: The :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` (the default) db.write_concern is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) db.read_preference is used. .. versionchanged:: 4.0 @@ -545,14 +535,13 @@ def open_upload_stream( that file exists. Raises :exc:`~ValueError` if `filename` is not a string. - :Parameters: - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this + :param filename: The name of the file to upload. + :param chunk_size_bytes` (options): The number of bytes per chunk of this file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the + :param metadata: User data for the 'metadata' field of the files collection document. If not provided the metadata field will be omitted from the files collection document. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -603,16 +592,15 @@ def open_upload_stream_with_id( that file exists. Raises :exc:`~ValueError` if `filename` is not a string. - :Parameters: - - `file_id`: The id to use for this file. The id must not have + :param file_id: The id to use for this file. The id must not have already been used for another file. - - `filename`: The name of the file to upload. - - `chunk_size_bytes` (options): The number of bytes per chunk of this + :param filename: The name of the file to upload. 
+ :param chunk_size_bytes` (options): The number of bytes per chunk of this file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the + :param metadata: User data for the 'metadata' field of the files collection document. If not provided the metadata field will be omitted from the files collection document. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -661,16 +649,15 @@ def upload_from_stream( that file exists. Raises :exc:`~ValueError` if `filename` is not a string. - :Parameters: - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be + :param filename: The name of the file to upload. + :param source: The source stream of the content to be uploaded. Must be a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this + :param chunk_size_bytes` (options): The number of bytes per chunk of this file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the + :param metadata: User data for the 'metadata' field of the files collection document. If not provided the metadata field will be omitted from the files collection document. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -710,18 +697,17 @@ def upload_from_stream_with_id( that file exists. Raises :exc:`~ValueError` if `filename` is not a string. - :Parameters: - - `file_id`: The id to use for this file. The id must not have + :param file_id: The id to use for this file. The id must not have already been used for another file. - - `filename`: The name of the file to upload. - - `source`: The source stream of the content to be uploaded. Must be + :param filename: The name of the file to upload. + :param source: The source stream of the content to be uploaded. Must be a file-like object that implements :meth:`read` or a string. - - `chunk_size_bytes` (options): The number of bytes per chunk of this + :param chunk_size_bytes` (options): The number of bytes per chunk of this file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. - - `metadata` (optional): User data for the 'metadata' field of the + :param metadata: User data for the 'metadata' field of the files collection document. If not provided the metadata field will be omitted from the files collection document. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -751,9 +737,8 @@ def open_download_stream( Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - `session` (optional): a + :param file_id: The _id of the file to be downloaded. + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -786,10 +771,9 @@ def download_to_stream( Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - :Parameters: - - `file_id`: The _id of the file to be downloaded. - - `destination`: a file-like object implementing :meth:`write`. - - `session` (optional): a + :param file_id: The _id of the file to be downloaded. + :param destination: a file-like object implementing :meth:`write`. + :param session: a :class:`~pymongo.client_session.ClientSession` .. 
versionchanged:: 3.6 @@ -817,9 +801,8 @@ def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None: Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - :Parameters: - - `file_id`: The _id of the file to be deleted. - - `session` (optional): a + :param file_id: The _id of the file to be deleted. + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 @@ -864,17 +847,16 @@ def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances are associated with that session. - :Parameters: - - `filter`: Search query. - - `batch_size` (optional): The number of documents to return per + :param filter: Search query. + :param batch_size: The number of documents to return per batch. - - `limit` (optional): The maximum number of documents to return. - - `no_cursor_timeout` (optional): The server normally times out idle + :param limit: The maximum number of documents to return. + :param no_cursor_timeout: The server normally times out idle cursors after an inactivity period (10 minutes) to prevent excess memory use. Set this option to True prevent that. - - `skip` (optional): The number of documents to skip before + :param skip: The number of documents to skip before returning. - - `sort` (optional): The order by which to sort results. Defaults to + :param sort: The order by which to sort results. Defaults to None. """ return GridOutCursor(self._collection, *args, **kwargs) @@ -899,12 +881,11 @@ def open_download_stream_by_name( Raises :exc:`~ValueError` filename is not a string. - :Parameters: - - `filename`: The name of the file to read from. - - `revision` (optional): Which revision (documents with the same + :param filename: The name of the file to read from. + :param revision: Which revision (documents with the same filename and different uploadDate) of the file to retrieve. Defaults to -1 (the most recent revision). - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` :Note: Revision numbers are defined as follows: @@ -958,13 +939,12 @@ def download_to_stream_by_name( Raises :exc:`~ValueError` if `filename` is not a string. - :Parameters: - - `filename`: The name of the file to read from. - - `destination`: A file-like object that implements :meth:`write`. - - `revision` (optional): Which revision (documents with the same + :param filename: The name of the file to read from. + :param destination: A file-like object that implements :meth:`write`. + :param revision: Which revision (documents with the same filename and different uploadDate) of the file to retrieve. Defaults to -1 (the most recent revision). - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` :Note: Revision numbers are defined as follows: @@ -1001,10 +981,9 @@ def rename( Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. - :Parameters: - - `file_id`: The _id of the file to be renamed. - - `new_filename`: The new name of the file. - - `session` (optional): a + :param file_id: The _id of the file to be renamed. + :param new_filename: The new name of the file. + :param session: a :class:`~pymongo.client_session.ClientSession` .. versionchanged:: 3.6 diff --git a/gridfs/grid_file.py b/gridfs/grid_file.py index 685d097494..cb78c65cc6 100644 --- a/gridfs/grid_file.py +++ b/gridfs/grid_file.py @@ -152,12 +152,11 @@ def __init__( - ``"encoding"``: encoding used for this file. 
Any :class:`str` that is written to the file will be converted to :class:`bytes`. - :Parameters: - - `root_collection`: root collection to write to - - `session` (optional): a + :param root_collection: root collection to write to + :param session: a :class:`~pymongo.client_session.ClientSession` to use for all commands - - `**kwargs: Any` (optional): file level options (see above) + :param kwargs: file level options (see above) .. versionchanged:: 4.0 Removed the `disable_md5` parameter. See @@ -344,8 +343,7 @@ def write(self, data: Any) -> None: Unicode data is only allowed if the file has an :attr:`encoding` attribute. - :Parameters: - - `data`: string of bytes or file-like object to be written + :param data: string of bytes or file-like object to be written to the file """ if self._closed: @@ -438,12 +436,11 @@ def __init__( :class:`TypeError` if `root_collection` is not an instance of :class:`~pymongo.collection.Collection`. - :Parameters: - - `root_collection`: root collection to read from - - `file_id` (optional): value of ``"_id"`` for the file to read - - `file_document` (optional): file document from + :param root_collection: root collection to read from + :param file_id: value of ``"_id"`` for the file to read + :param file_document: file document from `root_collection.files` - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` to use for all commands @@ -608,8 +605,7 @@ def read(self, size: int = -1) -> bytes: The bytes are returned as an instance of :class:`bytes` If `size` is negative or omitted all data is read. - :Parameters: - - `size` (optional): the number of bytes to read + :param size: the number of bytes to read .. versionchanged:: 3.8 This method now only checks for extra chunks after reading the @@ -621,8 +617,7 @@ def readline(self, size: int = -1) -> bytes: # type: ignore[override] """Read one line or up to `size` bytes from the file. - :Parameters: - - `size` (optional): the maximum number of bytes to read + :param size: the maximum number of bytes to read """ return self._read_size_or_line(size=size, line=True) @@ -633,10 +628,9 @@ def tell(self) -> int: def seek(self, pos: int, whence: int = _SEEK_SET) -> int: """Set the current position of this file. - :Parameters: - - `pos`: the position (or offset if using relative + :param pos: the position (or offset if using relative positioning) to seek to - - `whence` (optional): where to seek + :param whence: where to seek from. :attr:`os.SEEK_SET` (``0``) for absolute file positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative to the current position, :attr:`os.SEEK_END` (``2``) to diff --git a/pymongo/__init__.py b/pymongo/__init__.py index cdcbe5a5a0..758bb33ac8 100644 --- a/pymongo/__init__.py +++ b/pymongo/__init__.py @@ -159,11 +159,9 @@ def timeout(seconds: Optional[float]) -> ContextManager[None]: coll.find_one() # Still uses the original 5 second deadline. coll.find_one() # Uses the original 5 second deadline. - :Parameters: - - `seconds`: A non-negative floating point number expressing seconds, or None. + :param seconds: A non-negative floating point number expressing seconds, or None. - :Raises: - - :py:class:`ValueError`: When `seconds` is negative. + :raises: :py:class:`ValueError`: When `seconds` is negative. See :ref:`timeout-example` for more examples.
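The converted ``timeout`` docstring above describes a context manager that applies one client-side deadline to every operation run inside its block. A minimal usage sketch follows; the ``MongoClient()`` connection and the ``db.coll`` namespace are placeholders for illustration, not part of this patch::

    from pymongo import MongoClient, timeout
    from pymongo.errors import PyMongoError

    client = MongoClient()  # assumes a locally reachable mongod; adjust the URI as needed
    coll = client.db.coll

    try:
        with timeout(5):  # both operations below share a single 5 second deadline
            coll.insert_one({"name": "example"})
            coll.find_one({"name": "example"})
    except PyMongoError as exc:
        if exc.timeout:  # True when the failure was caused by the deadline expiring
            print("operation timed out")
        else:
            raise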
diff --git a/pymongo/bulk.py b/pymongo/bulk.py index 10e77d8b12..cb60fe8756 100644 --- a/pymongo/bulk.py +++ b/pymongo/bulk.py @@ -89,18 +89,16 @@ def __init__(self, op_type: int) -> None: def index(self, idx: int) -> int: """Get the original index of an operation in this run. - :Parameters: - - `idx`: The Run index that maps to the original index. + :param idx: The Run index that maps to the original index. """ return self.index_map[idx] def add(self, original_index: int, operation: Any) -> None: """Add an operation to this Run instance. - :Parameters: - - `original_index`: The original index of this operation + :param original_index: The original index of this operation within a larger bulk operation. - - `operation`: The operation document. + :param operation: The operation document. """ self.index_map.append(original_index) self.ops.append(operation) diff --git a/pymongo/change_stream.py b/pymongo/change_stream.py index 75cd169790..3089a05d3d 100644 --- a/pymongo/change_stream.py +++ b/pymongo/change_stream.py @@ -356,8 +356,7 @@ def try_next(self) -> Optional[_DocumentType]: document is returned, otherwise, if the getMore returns no documents (because there have been no changes) then ``None`` is returned. - :Returns: - The next change document or ``None`` when no document is available + :return: The next change document or ``None`` when no document is available after running a single getMore or when the cursor is closed. .. versionadded:: 3.8 diff --git a/pymongo/client_session.py b/pymongo/client_session.py index 0aac770111..5a817fc48d 100644 --- a/pymongo/client_session.py +++ b/pymongo/client_session.py @@ -183,13 +183,12 @@ class SessionOptions: """Options for a new :class:`ClientSession`. - :Parameters: - - `causal_consistency` (optional): If True, read operations are causally + :param causal_consistency: If True, read operations are causally ordered within the session. Defaults to True when the ``snapshot`` option is ``False``. - - `default_transaction_options` (optional): The default + :param default_transaction_options: The default TransactionOptions to use for transactions started on this session. - - `snapshot` (optional): If True, then all reads performed using this + :param snapshot: If True, then all reads performed using this session will read from the same snapshot. This option is incompatible with ``causal_consistency=True``. Defaults to ``False``. @@ -247,21 +246,20 @@ def snapshot(self) -> Optional[bool]: class TransactionOptions: """Options for :meth:`ClientSession.start_transaction`. - :Parameters: - - `read_concern` (optional): The + :param read_concern: The :class:`~pymongo.read_concern.ReadConcern` to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of the :class:`MongoClient` is used. - - `write_concern` (optional): The + :param write_concern: The :class:`~pymongo.write_concern.WriteConcern` to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of the :class:`MongoClient` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` for options. Transactions which read must use :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. 
- - `max_commit_time_ms` (optional): The maximum amount of time to allow a + :param max_commit_time_ms: The maximum amount of time to allow a single commitTransaction command to run. This option is an alias for maxTimeMS option on the commitTransaction command. If ``None`` (the default) maxTimeMS is not used. @@ -655,24 +653,22 @@ def callback(session, custom_arg, custom_kwarg=None): timeout is reached will be re-raised. Applications that desire a different timeout duration should not use this method. - :Parameters: - - `callback`: The callable ``callback`` to run inside a transaction. + :param callback: The callable ``callback`` to run inside a transaction. The callable must accept a single argument, this session. Note, under certain error conditions the callback may be run multiple times. - - `read_concern` (optional): The + :param read_concern: The :class:`~pymongo.read_concern.ReadConcern` to use for this transaction. - - `write_concern` (optional): The + :param write_concern: The :class:`~pymongo.write_concern.WriteConcern` to use for this transaction. - - `read_preference` (optional): The read preference to use for this + :param read_preference: The read preference to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of this :class:`Database` is used. See :mod:`~pymongo.read_preferences` for options. - :Returns: - The return value of the ``callback``. + :return: The return value of the ``callback``. .. versionadded:: 3.9 """ @@ -833,8 +829,7 @@ def abort_transaction(self) -> None: def _finish_transaction_with_retry(self, command_name: str) -> dict[str, Any]: """Run commit or abort with one retry after any retryable error. - :Parameters: - - `command_name`: Either "commitTransaction" or "abortTransaction". + :param command_name: Either "commitTransaction" or "abortTransaction". """ def func( @@ -882,8 +877,7 @@ def _advance_cluster_time(self, cluster_time: Optional[Mapping[str, Any]]) -> No def advance_cluster_time(self, cluster_time: Mapping[str, Any]) -> None: """Update the cluster time for this session. - :Parameters: - - `cluster_time`: The + :param cluster_time: The :data:`~pymongo.client_session.ClientSession.cluster_time` from another `ClientSession` instance. """ @@ -904,8 +898,7 @@ def _advance_operation_time(self, operation_time: Optional[Timestamp]) -> None: def advance_operation_time(self, operation_time: Timestamp) -> None: """Update the operation time for this session. - :Parameters: - - `operation_time`: The + :param operation_time: The :data:`~pymongo.client_session.ClientSession.operation_time` from another `ClientSession` instance. """ diff --git a/pymongo/collation.py b/pymongo/collation.py index e025abd80a..971628f4ec 100644 --- a/pymongo/collation.py +++ b/pymongo/collation.py @@ -97,22 +97,21 @@ class CollationCaseFirst: class Collation: """Collation - :Parameters: - - `locale`: (string) The locale of the collation. This should be a string + :param locale: (string) The locale of the collation. This should be a string that identifies an `ICU locale ID` exactly. For example, ``en_US`` is valid, but ``en_us`` and ``en-US`` are not. Consult the MongoDB documentation for a list of supported locales. - - `caseLevel`: (optional) If ``True``, turn on case sensitivity if + :param caseLevel: (optional) If ``True``, turn on case sensitivity if `strength` is 1 or 2 (case sensitivity is implied if `strength` is greater than 2). Defaults to ``False``. 
- - `caseFirst`: (optional) Specify that either uppercase or lowercase + :param caseFirst: (optional) Specify that either uppercase or lowercase characters take precedence. Must be one of the following values: * :data:`~CollationCaseFirst.UPPER` * :data:`~CollationCaseFirst.LOWER` * :data:`~CollationCaseFirst.OFF` (the default) - - `strength`: (optional) Specify the comparison strength. This is also + :param strength: Specify the comparison strength. This is also known as the ICU comparison level. This must be one of the following values: @@ -126,27 +125,27 @@ class Collation: `strength` of :data:`~CollationStrength.SECONDARY` differentiates characters based both on the unadorned base character and its accents. - - `numericOrdering`: (optional) If ``True``, order numbers numerically + :param numericOrdering: If ``True``, order numbers numerically instead of in collation order (defaults to ``False``). - - `alternate`: (optional) Specify whether spaces and punctuation are + :param alternate: Specify whether spaces and punctuation are considered base characters. This must be one of the following values: * :data:`~CollationAlternate.NON_IGNORABLE` (the default) * :data:`~CollationAlternate.SHIFTED` - - `maxVariable`: (optional) When `alternate` is + :param maxVariable: When `alternate` is :data:`~CollationAlternate.SHIFTED`, this option specifies what characters may be ignored. This must be one of the following values: * :data:`~CollationMaxVariable.PUNCT` (the default) * :data:`~CollationMaxVariable.SPACE` - - `normalization`: (optional) If ``True``, normalizes text into Unicode + :param normalization: If ``True``, normalizes text into Unicode NFD. Defaults to ``False``. - - `backwards`: (optional) If ``True``, accents on characters are + :param backwards: If ``True``, accents on characters are considered from the back of the word to the front, as it is done in some French dictionary ordering traditions. Defaults to ``False``. - - `kwargs`: (optional) Keyword arguments supplying any additional options + :param kwargs: Keyword arguments supplying any additional options to be sent with this Collation object. .. versionadded: 3.4 diff --git a/pymongo/collection.py b/pymongo/collection.py index 38804afd9b..95a9f24778 100644 --- a/pymongo/collection.py +++ b/pymongo/collection.py @@ -153,29 +153,28 @@ def __init__( use. The optional ``session`` argument is *only* used for the ``create`` command, it is not associated with the collection afterward. - :Parameters: - - `database`: the database to get a collection from - - `name`: the name of the collection to get - - `create` (optional): if ``True``, force collection + :param database: the database to get a collection from + :param name: the name of the collection to get + :param create: if ``True``, force collection creation even without options being set - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) database.codec_options is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) database.read_preference is used. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) database.write_concern is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. 
If ``None`` (the default) database.read_concern is used. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. If a collation is provided, it will be passed to the create collection command. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession` that is used with the create collection command - - `**kwargs` (optional): additional keyword arguments will + :param kwargs: additional keyword arguments will be passed as options for the create collection command .. versionchanged:: 4.2 @@ -278,30 +277,28 @@ def _command( ) -> Mapping[str, Any]: """Internal command helper. - :Parameters: - - `conn` - A Connection instance. - - `command` - The command itself, as a :class:`~bson.son.SON` instance. - - `read_preference` (optional) - The read preference to use. - - `codec_options` (optional) - An instance of + :param conn: A Connection instance. + :param command: The command itself, as a :class:`~bson.son.SON` instance. + :param read_preference: The read preference to use. + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. - - `check`: raise OperationFailure if there are errors - - `allowable_errors`: errors to ignore if `check` is True - - `read_concern` (optional) - An instance of + :param check: raise OperationFailure if there are errors + :param allowable_errors: errors to ignore if `check` is True + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. - - `write_concern`: An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. - - `collation` (optional) - An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `retryable_write` (optional): True if this command is a retryable + :param retryable_write: True if this command is a retryable write. - - `user_fields` (optional): Response fields that should be decoded + :param user_fields: Response fields that should be decoded using the TypeDecoders from codec_options, passed to bson._decode_all_selective. - :Returns: - The result document. + :return: The result document. """ with self.__database.client._tmp_session(session) as s: return conn.command( @@ -361,8 +358,7 @@ def __getattr__(self, name: str) -> Collection[_DocumentType]: Raises InvalidName if an invalid collection name is used. - :Parameters: - - `name`: the name of the collection to get + :param name: the name of the collection to get """ if name.startswith("_"): full_name = f"{self.__name}.{name}" @@ -442,20 +438,19 @@ def with_options( >>> coll2.read_preference Secondary(tag_sets=None) - :Parameters: - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`Collection` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`Collection` is used. See :mod:`~pymongo.read_preferences` for options. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`Collection` is used.
- - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) the :attr:`read_concern` of this :class:`Collection` is used. @@ -516,27 +511,25 @@ def bulk_write( {'y': 1, '_id': ObjectId('54f62ee2fba5226811f634f1')} {'z': 1, '_id': ObjectId('54f62ee28891e756a6e1abd5')} - :Parameters: - - `requests`: A list of write operations (see examples above). - - `ordered` (optional): If ``True`` (the default) requests will be + :param requests: A list of write operations (see examples above). + :param ordered: If ``True`` (the default) requests will be performed on the server serially, in the order provided. If an error occurs all remaining operations are aborted. If ``False`` requests will be performed on the server in arbitrary order, possibly in parallel, and all operations will be attempted. - - `bypass_document_validation`: (optional) If ``True``, allows the + :param bypass_document_validation: (optional) If ``True``, allows the write to opt-out of document level validation. Default is ``False``. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - :Returns: - An instance of :class:`~pymongo.results.BulkWriteResult`. + :return: An instance of :class:`~pymongo.results.BulkWriteResult`. .. seealso:: :ref:`writes-and-ids` @@ -628,20 +621,18 @@ def insert_one( >>> db.test.find_one({'x': 1}) {'x': 1, '_id': ObjectId('54f112defba522406c9cc208')} - :Parameters: - - `document`: The document to insert. Must be a mutable mapping + :param document: The document to insert. Must be a mutable mapping type. If the document does not have an _id field one will be added automatically. - - `bypass_document_validation`: (optional) If ``True``, allows the + :param bypass_document_validation: (optional) If ``True``, allows the write to opt-out of document level validation. Default is ``False``. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.InsertOneResult`. + :return: - An instance of :class:`~pymongo.results.InsertOneResult`. .. seealso:: :ref:`writes-and-ids` @@ -696,23 +687,21 @@ def insert_many( >>> db.test.count_documents({}) 2 - :Parameters: - - `documents`: A iterable of documents to insert. - - `ordered` (optional): If ``True`` (the default) documents will be + :param documents: A iterable of documents to insert. + :param ordered: If ``True`` (the default) documents will be inserted on the server serially, in the order provided. If an error occurs all remaining inserts are aborted. If ``False``, documents will be inserted on the server in arbitrary order, possibly in parallel, and all document inserts will be attempted. - - `bypass_document_validation`: (optional) If ``True``, allows the + :param bypass_document_validation: (optional) If ``True``, allows the write to opt-out of document level validation. 
Default is ``False``. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - An instance of :class:`~pymongo.results.InsertManyResult`. + :return: An instance of :class:`~pymongo.results.InsertManyResult`. .. seealso:: :ref:`writes-and-ids` @@ -922,32 +911,30 @@ def replace_one( >>> db.test.find_one({'x': 1}) {'x': 1, '_id': ObjectId('54f11e5c8891e756a6e1abd4')} - :Parameters: - - `filter`: A query that matches the document to replace. - - `replacement`: The new document. - - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the document to replace. + :param replacement: The new document. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `bypass_document_validation`: (optional) If ``True``, allows the + :param bypass_document_validation: (optional) If ``True``, allows the write to opt-out of document level validation. Default is ``False``. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.2 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.UpdateResult`. + :return: - An instance of :class:`~pymongo.results.UpdateResult`. .. versionchanged:: 4.1 Added ``let`` parameter. @@ -1030,35 +1017,33 @@ def update_one( >>> db.test.find_one(result.upserted_id) {'_id': ObjectId('626a678eeaa80587d4bb3fb7'), 'x': -7} - :Parameters: - - `filter`: A query that matches the document to update. - - `update`: The modifications to apply. - - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the document to update. + :param update: The modifications to apply. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `bypass_document_validation`: (optional) If ``True``, allows the + :param bypass_document_validation: (optional) If ``True``, allows the write to opt-out of document level validation. Default is ``False``. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `array_filters` (optional): A list of filters specifying which + :param array_filters: A list of filters specifying which array elements an update should apply. 
- - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.2 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.UpdateResult`. + :return: - An instance of :class:`~pymongo.results.UpdateResult`. .. versionchanged:: 4.1 Added ``let`` parameter. @@ -1131,35 +1116,33 @@ def update_many( {'x': 4, '_id': 1} {'x': 4, '_id': 2} - :Parameters: - - `filter`: A query that matches the documents to update. - - `update`: The modifications to apply. - - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the documents to update. + :param update: The modifications to apply. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `bypass_document_validation` (optional): If ``True``, allows the + :param bypass_document_validation: If ``True``, allows the write to opt-out of document level validation. Default is ``False``. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `array_filters` (optional): A list of filters specifying which + :param array_filters: A list of filters specifying which array elements an update should apply. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.2 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.UpdateResult`. + :return: - An instance of :class:`~pymongo.results.UpdateResult`. .. versionchanged:: 4.1 Added ``let`` parameter. @@ -1208,12 +1191,11 @@ def drop( ) -> None: """Alias for :meth:`~pymongo.database.Database.drop_collection`. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. 
- - `encrypted_fields`: **(BETA)** Document that describes the encrypted fields for + :param encrypted_fields: **(BETA)** Document that describes the encrypted fields for Queryable Encryption. The following two calls are equivalent: @@ -1356,27 +1338,25 @@ def delete_one( >>> db.test.count_documents({'x': 1}) 2 - :Parameters: - - `filter`: A query that matches the document to delete. - - `collation` (optional): An instance of + :param filter: A query that matches the document to delete. + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.4 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.DeleteResult`. + :return: - An instance of :class:`~pymongo.results.DeleteResult`. .. versionchanged:: 4.1 Added ``let`` parameter. @@ -1423,27 +1403,25 @@ def delete_many( >>> db.test.count_documents({'x': 1}) 0 - :Parameters: - - `filter`: A query that matches the documents to delete. - - `collation` (optional): An instance of + :param filter: A query that matches the documents to delete. + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.4 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - - An instance of :class:`~pymongo.results.DeleteResult`. + :return: - An instance of :class:`~pymongo.results.DeleteResult`. .. versionchanged:: 4.1 Added ``let`` parameter. @@ -1484,18 +1462,18 @@ def find_one( The :meth:`find_one` method obeys the :attr:`read_preference` of this :class:`Collection`. - :Parameters: - - - `filter` (optional): a dictionary specifying + :param filter: a dictionary specifying the query to be performed OR any other type to be used as the value for a query for ``"_id"``. - - `*args` (optional): any additional positional arguments + :param args: any additional positional arguments are the same as the arguments to :meth:`find`. 
- - `**kwargs` (optional): any additional keyword arguments + :param kwargs: any additional keyword arguments are the same as the arguments to :meth:`find`. + .. code-block:: python + >>> collection.find_one(max_time_ms=100) """ @@ -1528,28 +1506,27 @@ def find(self, *args: Any, **kwargs: Any) -> Cursor[_DocumentType]: The :meth:`find` method obeys the :attr:`read_preference` of this :class:`Collection`. - :Parameters: - - `filter` (optional): A query document that selects which documents + :param filter: A query document that selects which documents to include in the result set. Can be an empty document to include all documents. - - `projection` (optional): a list of field names that should be + :param projection: a list of field names that should be returned in the result set or a dict specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a dict to exclude fields from the result (e.g. projection={'_id': False}). - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `skip` (optional): the number of documents to omit (from + :param skip: the number of documents to omit (from the start of the result set) when returning the results - - `limit` (optional): the maximum number of results to + :param limit: the maximum number of results to return. A limit of 0 (the default) is equivalent to setting no limit. - - `no_cursor_timeout` (optional): if False (the default), any + :param no_cursor_timeout: if False (the default), any returned cursor is closed by the server after 10 minutes of inactivity. If set to True, the returned cursor will never time out on the server. Care should be taken to ensure that cursors with no_cursor_timeout turned on are properly closed. - - `cursor_type` (optional): the type of cursor to return. The valid + :param cursor_type: the type of cursor to return. The valid options are defined by :class:`~pymongo.cursor.CursorType`: - :attr:`~pymongo.cursor.CursorType.NON_TAILABLE` - the result of @@ -1572,53 +1549,53 @@ def find(self, *args: Any, **kwargs: Any) -> Cursor[_DocumentType]: results to the client without waiting for the client to request each batch, reducing latency. See notes on compatibility below. - - `sort` (optional): a list of (key, direction) pairs + :param sort: a list of (key, direction) pairs specifying the sort order for this query. See :meth:`~pymongo.cursor.Cursor.sort` for details. - - `allow_partial_results` (optional): if True, mongos will return + :param allow_partial_results: if True, mongos will return partial results if some shards are down instead of returning an error. - - `oplog_replay` (optional): **DEPRECATED** - if True, set the + :param oplog_replay: **DEPRECATED** - if True, set the oplogReplay query flag. Default: False. - - `batch_size` (optional): Limits the number of documents returned in + :param batch_size: Limits the number of documents returned in a single batch. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `return_key` (optional): If True, return only the index keys in + :param return_key: If True, return only the index keys in each document. - - `show_record_id` (optional): If True, adds a field ``$recordId`` in + :param show_record_id: If True, adds a field ``$recordId`` in each document with the storage engine's internal record identifier.
- - `snapshot` (optional): **DEPRECATED** - If True, prevents the + :param snapshot: **DEPRECATED** - If True, prevents the cursor from returning a document more than once because of an intervening write operation. - - `hint` (optional): An index, in the same format as passed to + :param hint: An index, in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.hint` on the cursor to tell Mongo the proper index to use for the query. - - `max_time_ms` (optional): Specifies a time limit for a query + :param max_time_ms: Specifies a time limit for a query operation. If the specified time is exceeded, the operation will be aborted and :exc:`~pymongo.errors.ExecutionTimeout` is raised. Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.max_time_ms` on the cursor. - - `max_scan` (optional): **DEPRECATED** - The maximum number of + :param max_scan: **DEPRECATED** - The maximum number of documents to scan. Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.max_scan` on the cursor. - - `min` (optional): A list of field, limit pairs specifying the + :param min: A list of field, limit pairs specifying the inclusive lower bound for all keys of a specific index in order. Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.min` on the cursor. ``hint`` must also be passed to ensure the query utilizes the correct index. - - `max` (optional): A list of field, limit pairs specifying the + :param max: A list of field, limit pairs specifying the exclusive upper bound for all keys of a specific index in order. Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.max` on the cursor. ``hint`` must also be passed to ensure the query utilizes the correct index. - - `comment` (optional): A string to attach to the query to help + :param comment: A string to attach to the query to help interpret and trace the operation in the server logs and in profile data. Pass this as an alternative to calling :meth:`~pymongo.cursor.Cursor.comment` on the cursor. - - `allow_disk_use` (optional): if True, MongoDB may use temporary + :param allow_disk_use: if True, MongoDB may use temporary disk files to store data exceeding the system memory limit while processing a blocking sort operation. The option has no effect if MongoDB can satisfy the specified sort using an index, or if the @@ -1789,10 +1766,9 @@ def estimated_document_count(self, comment: Optional[Any] = None, **kwargs: Any) - `maxTimeMS` (int): The maximum amount of time to allow this operation to run, in milliseconds. - :Parameters: - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): See list of options above. + :param kwargs: See list of options above. .. versionchanged:: 4.2 This method now always uses the `count`_ command. Due to an oversight in versions @@ -1866,15 +1842,14 @@ def count_documents( | $nearSphere | `$geoWithin`_ with `$centerSphere`_ | +-------------+-------------------------------------+ - :Parameters: - - `filter` (required): A query document that selects which documents + :param filter: A query document that selects which documents to count in the collection. Can be an empty document to count all documents. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. 
- - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): See list of options above. + :param kwargs: See list of options above. .. versionadded:: 3.7 @@ -1937,14 +1912,13 @@ def create_indexes( >>> db.test.create_indexes([index1, index2]) ["hello_world", "goodbye_-1"] - :Parameters: - - `indexes`: A list of :class:`~pymongo.operations.IndexModel` + :param indexes: A list of :class:`~pymongo.operations.IndexModel` instances. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the createIndexes + :param kwargs: optional arguments to the createIndexes command (like maxTimeMS) can be passed as keyword arguments. @@ -1975,12 +1949,11 @@ def __create_indexes( ) -> list[str]: """Internal createIndexes helper. - :Parameters: - - `indexes`: A list of :class:`~pymongo.operations.IndexModel` + :param indexes: A list of :class:`~pymongo.operations.IndexModel` instances. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `**kwargs` (optional): optional arguments to the createIndexes + :param kwargs: optional arguments to the createIndexes command (like maxTimeMS) can be passed as keyword arguments. """ names = [] @@ -2091,14 +2064,13 @@ def create_index( .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of this collection is automatically applied to this operation. - :Parameters: - - `keys`: a single key or a list of (key, direction) + :param keys: a single key or a list of (key, direction) pairs specifying the index to create - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): any additional index creation + :param kwargs: any additional index creation options (see the above list) should be passed as keyword arguments. @@ -2145,12 +2117,11 @@ def drop_indexes( Can be used on non-existent collections or collections with no indexes. Raises OperationFailure on an error. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the createIndexes + :param kwargs: optional arguments to the createIndexes command (like maxTimeMS) can be passed as keyword arguments. .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of @@ -2192,13 +2163,12 @@ def drop_index( passing the `name` parameter to :meth:`create_index`) the index **must** be dropped by name. - :Parameters: - - `index_or_name`: index (or name of index) to drop - - `session` (optional): a + :param index_or_name: index (or name of index) to drop + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. 
- - `**kwargs` (optional): optional arguments to the createIndexes + :param kwargs: optional arguments to the createIndexes command (like maxTimeMS) can be passed as keyword arguments. @@ -2249,14 +2219,12 @@ def list_indexes( ... SON([('v', 2), ('key', SON([('_id', 1)])), ('name', '_id_')]) - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - :Returns: - An instance of :class:`~pymongo.command_cursor.CommandCursor`. + :return: An instance of :class:`~pymongo.command_cursor.CommandCursor`. .. versionchanged:: 4.1 Added ``comment`` parameter. @@ -2331,10 +2299,9 @@ def index_information( {'_id_': {'key': [('_id', 1)]}, 'x_1': {'unique': True, 'key': [('x', 1)]}} - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. versionchanged:: 4.1 @@ -2360,17 +2327,15 @@ def list_search_indexes( ) -> CommandCursor[Mapping[str, Any]]: """Return a cursor over search indexes for the current collection. - :Parameters: - - `name` (optional): If given, the name of the index to search + :param name: If given, the name of the index to search for. Only indexes with matching index names will be returned. If not given, all search indexes for the current collection will be returned. - - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param session: a :class:`~pymongo.client_session.ClientSession`. + :param comment: A user-provided comment to attach to this command. - :Returns: - A :class:`~pymongo.command_cursor.CommandCursor` over the result + :return: A :class:`~pymongo.command_cursor.CommandCursor` over the result set. .. note:: requires a MongoDB server version 7.0+ Atlas cluster. @@ -2410,20 +2375,18 @@ def create_search_index( ) -> str: """Create a single search index for the current collection. - :Parameters: - - `model`: The model for the new search index. + :param model: The model for the new search index. It can be given as a :class:`~pymongo.operations.SearchIndexModel` instance or a dictionary with a model "definition" and optional "name". - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the createSearchIndexes + :param kwargs: optional arguments to the createSearchIndexes command (like maxTimeMS) can be passed as keyword arguments. - :Returns: - The name of the new search index. + :return: The name of the new search index. .. note:: requires a MongoDB server version 7.0+ Atlas cluster. @@ -2442,16 +2405,14 @@ def create_search_indexes( ) -> list[str]: """Create multiple search indexes for the current collection. - :Parameters: - - `models`: A list of :class:`~pymongo.operations.SearchIndexModel` instances. - - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param models: A list of :class:`~pymongo.operations.SearchIndexModel` instances. 
+ :param session: a :class:`~pymongo.client_session.ClientSession`. + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the createSearchIndexes + :param kwargs: optional arguments to the createSearchIndexes command (like maxTimeMS) can be passed as keyword arguments. - :Returns: - A list of the newly created search index names. + :return: A list of the newly created search index names. .. note:: requires a MongoDB server version 7.0+ Atlas cluster. @@ -2489,13 +2450,12 @@ def drop_search_index( ) -> None: """Delete a search index by index name. - :Parameters: - - `name`: The name of the search index to be deleted. - - `session` (optional): a + :param name: The name of the search index to be deleted. + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the dropSearchIndexes + :param kwargs: optional arguments to the dropSearchIndexes command (like maxTimeMS) can be passed as keyword arguments. .. note:: requires a MongoDB server version 7.0+ Atlas cluster. @@ -2525,14 +2485,13 @@ def update_search_index( ) -> None: """Update a search index by replacing the existing index definition with the provided definition. - :Parameters: - - `name`: The name of the search index to be updated. - - `definition`: The new search index definition. - - `session` (optional): a + :param name: The name of the search index to be updated. + :param definition: The new search index definition. + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): optional arguments to the updateSearchIndexes + :param kwargs: optional arguments to the updateSearchIndexes command (like maxTimeMS) can be passed as keyword arguments. .. note:: requires a MongoDB server version 7.0+ Atlas cluster. @@ -2564,10 +2523,9 @@ def options( information on the possible options. Returns an empty dictionary if the collection has not been created yet. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. versionchanged:: 3.6 @@ -2654,11 +2612,17 @@ def aggregate( .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of this collection is automatically applied to this operation. - :Parameters: - - `pipeline`: a list of aggregation pipeline stages - - `session` (optional): a + :param pipeline: a list of aggregation pipeline stages + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `**kwargs` (optional): extra `aggregate command`_ parameters. + :param let: A dict of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. ``"$$var"``). This option is + only supported on MongoDB >= 5.0. + :param comment: A user-provided comment to attach to this + command. + :param kwargs: extra `aggregate command`_ parameters. All optional `aggregate command`_ parameters should be passed as keyword arguments to this method. 
Valid options include, but are not @@ -2674,17 +2638,9 @@ def aggregate( returning aggregate results using a cursor. - `collation` (optional): An instance of :class:`~pymongo.collation.Collation`. - - `let` (dict): A dict of parameter names and values. Values must be - constant or closed expressions that do not reference document - fields. Parameters can then be accessed as variables in an - aggregate expression context (e.g. ``"$$var"``). This option is - only supported on MongoDB >= 5.0. - - `comment` (optional): A user-provided comment to attach to this - command. - :Returns: - A :class:`~pymongo.command_cursor.CommandCursor` over the result + :return: A :class:`~pymongo.command_cursor.CommandCursor` over the result set. .. versionchanged:: 4.1 @@ -2835,47 +2791,45 @@ def watch( ``ReadConcern("majority")`` in order to use the ``$changeStream`` stage. - :Parameters: - - `pipeline` (optional): A list of aggregation pipeline stages to + :param pipeline: A list of aggregation pipeline stages to append to an initial ``$changeStream`` stage. Not all pipeline stages are valid after a ``$changeStream`` stage, see the MongoDB documentation on change streams for the supported stages. - - `full_document` (optional): The fullDocument to pass as an option + :param full_document: The fullDocument to pass as an option to the ``$changeStream`` stage. Allowed values: 'updateLookup', 'whenAvailable', 'required'. When set to 'updateLookup', the change notification for partial updates will include both a delta describing the changes to the document, as well as a copy of the entire document that was changed from some time after the change occurred. - - `full_document_before_change`: Allowed values: 'whenAvailable' + :param full_document_before_change: Allowed values: 'whenAvailable' and 'required'. Change events may now result in a 'fullDocumentBeforeChange' response field. - - `resume_after` (optional): A resume token. If provided, the + :param resume_after: A resume token. If provided, the change stream will start returning changes that occur directly after the operation specified in the resume token. A resume token is the _id value of a change document. - - `max_await_time_ms` (optional): The maximum time in milliseconds + :param max_await_time_ms: The maximum time in milliseconds for the server to wait for changes before responding to a getMore operation. - - `batch_size` (optional): The maximum number of documents to return + :param batch_size: The maximum number of documents to return per batch. - - `collation` (optional): The :class:`~pymongo.collation.Collation` + :param collation: The :class:`~pymongo.collation.Collation` to use for the aggregation. - - `start_at_operation_time` (optional): If provided, the resulting + :param start_at_operation_time: If provided, the resulting change stream will only return changes that occurred at or after the specified :class:`~bson.timestamp.Timestamp`. Requires MongoDB >= 4.0. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `start_after` (optional): The same as `resume_after` except that + :param start_after: The same as `resume_after` except that `start_after` can resume notifications after an invalidate event. This option and `resume_after` are mutually exclusive. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. 
+ :param show_expanded_events: Include expanded events such as DDL events like `dropIndexes`. - :Returns: - A :class:`~pymongo.change_stream.CollectionChangeStream` cursor. + :return: A :class:`~pymongo.change_stream.CollectionChangeStream` cursor. .. versionchanged:: 4.3 Added `show_expanded_events` parameter. @@ -2931,13 +2885,12 @@ def rename( Raises :class:`~pymongo.errors.InvalidName` if `new_name` is not a valid collection name. - :Parameters: - - `new_name`: new name for this collection - - `session` (optional): a + :param new_name: new name for this collection + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): additional arguments to the rename command + :param kwargs: additional arguments to the rename command may be passed as keyword arguments to this helper method (i.e. ``dropTarget=True``) @@ -3005,16 +2958,15 @@ def distinct( The :meth:`distinct` method obeys the :attr:`read_preference` of this :class:`Collection`. - :Parameters: - - `key`: name of the field for which we want to get the distinct + :param key: name of the field for which we want to get the distinct values - - `filter` (optional): A query document that specifies the documents + :param filter: A query document that specifies the documents from which to retrieve the distinct values. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): See list of options above. + :param kwargs: See list of options above. .. versionchanged:: 3.6 Added ``session`` parameter. @@ -3175,30 +3127,29 @@ def find_one_and_delete( >>> db.test.find_one_and_delete({'x': 1}, projection={'_id': False}) {'x': 1} - :Parameters: - - `filter`: A query that matches the document to delete. - - `projection` (optional): a list of field names that should be + :param filter: A query that matches the document to delete. + :param projection: a list of field names that should be returned in the result document or a mapping specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a mapping to exclude fields from the result (e.g. projection={'_id': False}). - - `sort` (optional): a list of (key, direction) pairs + :param sort: a list of (key, direction) pairs specifying the sort order for the query. If multiple documents match the query, they are sorted and the first is deleted. - - `hint` (optional): An index to use to support the query predicate + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.4 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). 
- - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): additional command arguments can be passed + :param kwargs: additional command arguments can be passed as keyword arguments (for example maxTimeMS can be used with recent server versions). @@ -3264,40 +3215,39 @@ def find_one_and_replace( {'x': 1, '_id': 1} {'x': 1, '_id': 2} - :Parameters: - - `filter`: A query that matches the document to replace. - - `replacement`: The replacement document. - - `projection` (optional): A list of field names that should be + :param filter: A query that matches the document to replace. + :param replacement: The replacement document. + :param projection: A list of field names that should be returned in the result document or a mapping specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a mapping to exclude fields from the result (e.g. projection={'_id': False}). - - `sort` (optional): a list of (key, direction) pairs + :param sort: a list of (key, direction) pairs specifying the sort order for the query. If multiple documents match the query, they are sorted and the first is replaced. - - `upsert` (optional): When ``True``, inserts a new document if no + :param upsert: When ``True``, inserts a new document if no document matches the query. Defaults to ``False``. - - `return_document`: If + :param return_document: If :attr:`ReturnDocument.BEFORE` (the default), returns the original document before it was replaced, or ``None`` if no document matches. If :attr:`ReturnDocument.AFTER`, returns the replaced or inserted document. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.4 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): additional command arguments can be passed + :param kwargs: additional command arguments can be passed as keyword arguments (for example maxTimeMS can be used with recent server versions). @@ -3411,41 +3361,40 @@ def find_one_and_update( ... sort=[('_id', pymongo.DESCENDING)]) {'_id': 701, 'done': True, 'result': {'count': 17}} - :Parameters: - - `filter`: A query that matches the document to update. - - `update`: The update operations to apply. - - `projection` (optional): A list of field names that should be + :param filter: A query that matches the document to update. + :param update: The update operations to apply. + :param projection: A list of field names that should be returned in the result document or a mapping specifying the fields to include or exclude. If `projection` is a list "_id" will always be returned. Use a dict to exclude fields from the result (e.g. projection={'_id': False}). 
- - `sort` (optional): a list of (key, direction) pairs + :param sort: a list of (key, direction) pairs specifying the sort order for the query. If multiple documents match the query, they are sorted and the first is updated. - - `upsert` (optional): When ``True``, inserts a new document if no + :param upsert: When ``True``, inserts a new document if no document matches the query. Defaults to ``False``. - - `return_document`: If + :param return_document: If :attr:`ReturnDocument.BEFORE` (the default), returns the original document before it was updated. If :attr:`ReturnDocument.AFTER`, returns the updated or inserted document. - - `array_filters` (optional): A list of filters specifying which + :param array_filters: A list of filters specifying which array elements an update should apply. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.4 and above. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `let` (optional): Map of parameter names and values. Values must be + :param let: Map of parameter names and values. Values must be constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): additional command arguments can be passed + :param kwargs: additional command arguments can be passed as keyword arguments (for example maxTimeMS can be used with recent server versions). diff --git a/pymongo/command_cursor.py b/pymongo/command_cursor.py index 42becece28..0411a45abe 100644 --- a/pymongo/command_cursor.py +++ b/pymongo/command_cursor.py @@ -134,8 +134,7 @@ def batch_size(self, batch_size: int) -> CommandCursor[_DocumentType]: Raises :exc:`TypeError` if `batch_size` is not an integer. Raises :exc:`ValueError` if `batch_size` is less than ``0``. - :Parameters: - - `batch_size`: The size of each batch of results requested. + :param batch_size: The size of each batch of results requested. """ if not isinstance(batch_size, int): raise TypeError("batch_size must be an integer") @@ -335,8 +334,7 @@ def try_next(self) -> Optional[_DocumentType]: document is returned, otherwise, if the getMore returns no documents (because there is no additional data) then ``None`` is returned. - :Returns: - The next document or ``None`` when no document is available + :return: The next document or ``None`` when no document is available after running a single getMore or when the cursor is closed. .. versionadded:: 4.5 diff --git a/pymongo/common.py b/pymongo/common.py index e3da3a5f69..bda294af93 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -828,9 +828,8 @@ def get_validated_options( """Validate each entry in options and raise a warning if it is not valid. Returns a copy of options with invalid entries removed. - :Parameters: - - `opts`: A dict containing MongoDB URI options. - - `warn` (optional): If ``True`` then warnings will be logged and + :param opts: A dict containing MongoDB URI options. + :param warn: If ``True`` then warnings will be logged and invalid options will be ignored. 
Otherwise, invalid options will cause errors. """ diff --git a/pymongo/cursor.py b/pymongo/cursor.py index 23b7597efc..d2ee09f09a 100644 --- a/pymongo/cursor.py +++ b/pymongo/cursor.py @@ -548,8 +548,7 @@ def allow_disk_use(self, allow_disk_use: bool) -> Cursor[_DocumentType]: .. note:: `allow_disk_use` requires server version **>= 4.4** - :Parameters: - - `allow_disk_use`: if True, MongoDB may use temporary + :param allow_disk_use: if True, MongoDB may use temporary disk files to store data exceeding the system memory limit while processing a blocking sort operation. @@ -570,8 +569,7 @@ def limit(self, limit: int) -> Cursor[_DocumentType]: has already been used. The last `limit` applied to this cursor takes precedence. A limit of ``0`` is equivalent to no limit. - :Parameters: - - `limit`: the number of results to return + :param limit: the number of results to return .. seealso:: The MongoDB documentation on `limit `_. """ @@ -601,8 +599,7 @@ def batch_size(self, batch_size: int) -> Cursor[_DocumentType]: :class:`Cursor` has already been used. The last `batch_size` applied to this cursor takes precedence. - :Parameters: - - `batch_size`: The size of each batch of results requested. + :param batch_size: The size of each batch of results requested. """ if not isinstance(batch_size, int): raise TypeError("batch_size must be an integer") @@ -622,8 +619,7 @@ def skip(self, skip: int) -> Cursor[_DocumentType]: already been used. The last `skip` applied to this cursor takes precedence. - :Parameters: - - `skip`: the number of results to skip + :param skip: the number of results to skip """ if not isinstance(skip, int): raise TypeError("skip must be an integer") @@ -644,8 +640,7 @@ def max_time_ms(self, max_time_ms: Optional[int]) -> Cursor[_DocumentType]: Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has already been used. - :Parameters: - - `max_time_ms`: the time limit after which the operation is aborted + :param max_time_ms: the time limit after which the operation is aborted """ if not isinstance(max_time_ms, int) and max_time_ms is not None: raise TypeError("max_time_ms must be an integer or None") @@ -665,8 +660,7 @@ def max_await_time_ms(self, max_await_time_ms: Optional[int]) -> Cursor[_Documen .. note:: `max_await_time_ms` requires server version **>= 3.2** - :Parameters: - - `max_await_time_ms`: the time limit after which the operation is + :param max_await_time_ms: the time limit after which the operation is aborted .. versionadded:: 3.2 @@ -727,8 +721,7 @@ def __getitem__(self, index: Union[int, slice]) -> Union[_DocumentType, Cursor[_ start value, or a stop value less than or equal to the start value. - :Parameters: - - `index`: An integer or slice index to be applied to this cursor + :param index: An integer or slice index to be applied to this cursor """ self.__check_okay_to_chain() self.__empty = False @@ -777,8 +770,7 @@ def max_scan(self, max_scan: Optional[int]) -> Cursor[_DocumentType]: cursor has already been used. Only the last :meth:`max_scan` applied to this cursor has any effect. - :Parameters: - - `max_scan`: the maximum number of documents to scan + :param max_scan: the maximum number of documents to scan .. versionchanged:: 3.7 Deprecated :meth:`max_scan`. Support for this option is deprecated in @@ -796,8 +788,7 @@ def max(self, spec: _Sort) -> Cursor[_DocumentType]: the query uses the expected index and starting in MongoDB 4.2 :meth:`~hint` will be required. 
- :Parameters: - - `spec`: a list of field, limit pairs specifying the exclusive + :param spec: a list of field, limit pairs specifying the exclusive upper bound for all keys of a specific index in order. .. versionchanged:: 3.8 @@ -819,8 +810,7 @@ def min(self, spec: _Sort) -> Cursor[_DocumentType]: the query uses the expected index and starting in MongoDB 4.2 :meth:`~hint` will be required. - :Parameters: - - `spec`: a list of field, limit pairs specifying the inclusive + :param spec: a list of field, limit pairs specifying the inclusive lower bound for all keys of a specific index in order. .. versionchanged:: 3.8 @@ -873,10 +863,9 @@ def sort( already been used. Only the last :meth:`sort` applied to this cursor has any effect. - :Parameters: - - `key_or_list`: a single key or a list of (key, direction) + :param key_or_list: a single key or a list of (key, direction) pairs specifying the keys to sort on - - `direction` (optional): only used if `key_or_list` is a single + :param direction: only used if `key_or_list` is a single key, if not given :data:`~pymongo.ASCENDING` is assumed """ self.__check_okay_to_chain() @@ -896,8 +885,7 @@ def distinct(self, key: str) -> list: :class:`~pymongo.collection.Collection` instance on which :meth:`~pymongo.collection.Collection.find` was called. - :Parameters: - - `key`: name of key for which we want to get the distinct values + :param key: name of key for which we want to get the distinct values .. seealso:: :meth:`pymongo.collection.Collection.distinct` """ @@ -961,8 +949,7 @@ def hint(self, index: Optional[_Hint]) -> Cursor[_DocumentType]: cleared. The last hint applied to this cursor takes precedence over all others. - :Parameters: - - `index`: index to hint on (as an index specifier) + :param index: index to hint on (as an index specifier) """ self.__check_okay_to_chain() self.__set_hint(index) @@ -973,8 +960,7 @@ def comment(self, comment: Any) -> Cursor[_DocumentType]: http://mongodb.com/docs/manual/reference/operator/comment/ - :Parameters: - - `comment`: A string to attach to the query to help interpret and + :param comment: A string to attach to the query to help interpret and trace the operation in the server logs and in profile data. .. versionadded:: 2.7 @@ -1005,8 +991,7 @@ def where(self, code: Union[str, Code]) -> Cursor[_DocumentType]: .. note:: MongoDB 4.4 drops support for :class:`~bson.code.Code` with scope variables. Consider using `$expr`_ instead. - :Parameters: - - `code`: JavaScript expression to use as a filter + :param code: JavaScript expression to use as a filter .. _$expr: https://mongodb.com/docs/manual/reference/operator/query/expr/ .. _$where: https://mongodb.com/docs/manual/reference/operator/query/where/ @@ -1035,8 +1020,7 @@ def collation(self, collation: Optional[_CollationIn]) -> Cursor[_DocumentType]: already been used. Only the last collation applied to this cursor has any effect. - :Parameters: - - `collation`: An instance of :class:`~pymongo.collation.Collation`. + :param collation: An instance of :class:`~pymongo.collation.Collation`. """ self.__check_okay_to_chain() self.__collation = validate_collation_or_none(collation) diff --git a/pymongo/database.py b/pymongo/database.py index 75eabc5d34..fd28d8bf5b 100644 --- a/pymongo/database.py +++ b/pymongo/database.py @@ -87,18 +87,17 @@ def __init__( :class:`str`. Raises :class:`~pymongo.errors.InvalidName` if `name` is not a valid database name. - :Parameters: - - `client`: A :class:`~pymongo.mongo_client.MongoClient` instance. - - `name`: The database name. 
- - `codec_options` (optional): An instance of + :param client: A :class:`~pymongo.mongo_client.MongoClient` instance. + :param name: The database name. + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) client.codec_options is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) client.read_preference is used. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) client.write_concern is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) client.read_concern is used. @@ -171,20 +170,19 @@ def with_options( >>> db2.read_preference Secondary(tag_sets=[{'node': 'analytics'}], max_staleness=-1, hedge=None) - :Parameters: - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`Collection` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`Collection` is used. See :mod:`~pymongo.read_preferences` for options. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`Collection` is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) the :attr:`read_concern` of this :class:`Collection` is used. @@ -219,8 +217,7 @@ def __getattr__(self, name: str) -> Collection[_DocumentType]: Raises InvalidName if an invalid collection name is used. - :Parameters: - - `name`: the name of the collection to get + :param name: the name of the collection to get """ if name.startswith("_"): raise AttributeError( @@ -234,8 +231,7 @@ def __getitem__(self, name: str) -> Collection[_DocumentType]: Raises InvalidName if an invalid collection name is used. - :Parameters: - - `name`: the name of the collection to get + :param name: the name of the collection to get """ return Collection(self, name) @@ -265,21 +261,20 @@ def get_collection( >>> coll2.read_preference Secondary(tag_sets=None) - :Parameters: - - `name`: The name of the collection - a string. - - `codec_options` (optional): An instance of + :param name: The name of the collection - a string. + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`Database` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`Database` is used. See :mod:`~pymongo.read_preferences` for options. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`Database` is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. 
If ``None`` (the default) the :attr:`read_concern` of this :class:`Database` is used. @@ -341,30 +336,29 @@ def create_collection( creation. :class:`~pymongo.errors.CollectionInvalid` will be raised if the collection already exists. - :Parameters: - - `name`: the name of the collection to create - - `codec_options` (optional): An instance of + :param name: the name of the collection to create + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`Database` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`Database` is used. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`Database` is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) the :attr:`read_concern` of this :class:`Database` is used. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - ``check_exists`` (optional): if True (the default), send a listCollections command to + :param check_exists: if True (the default), send a listCollections command to check if the collection already exists before creation. - - `**kwargs` (optional): additional keyword arguments will + :param kwargs: additional keyword arguments will be passed as options for the `create collection command`_ All optional `create collection command`_ parameters should be passed @@ -503,11 +497,10 @@ def aggregate( .. note:: The :attr:`~pymongo.database.Database.write_concern` of this collection is automatically applied to this operation. - :Parameters: - - `pipeline`: a list of aggregation pipeline stages - - `session` (optional): a + :param pipeline: a list of aggregation pipeline stages + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `**kwargs` (optional): extra `aggregate command`_ parameters. + :param kwargs: extra `aggregate command`_ parameters. All optional `aggregate command`_ parameters should be passed as keyword arguments to this method. Valid options include, but are not @@ -529,8 +522,7 @@ def aggregate( aggregate expression context (e.g. ``"$$var"``). This option is only supported on MongoDB >= 5.0. - :Returns: - A :class:`~pymongo.command_cursor.CommandCursor` over the result + :return: A :class:`~pymongo.command_cursor.CommandCursor` over the result set. .. versionadded:: 3.9 @@ -610,47 +602,45 @@ def watch( For a precise description of the resume process see the `change streams specification`_. - :Parameters: - - `pipeline` (optional): A list of aggregation pipeline stages to + :param pipeline: A list of aggregation pipeline stages to append to an initial ``$changeStream`` stage. Not all pipeline stages are valid after a ``$changeStream`` stage, see the MongoDB documentation on change streams for the supported stages. - - `full_document` (optional): The fullDocument to pass as an option + :param full_document: The fullDocument to pass as an option to the ``$changeStream`` stage. Allowed values: 'updateLookup', 'whenAvailable', 'required'.
When set to 'updateLookup', the change notification for partial updates will include both a delta describing the changes to the document, as well as a copy of the entire document that was changed from some time after the change occurred. - - `full_document_before_change`: Allowed values: 'whenAvailable' + :param full_document_before_change: Allowed values: 'whenAvailable' and 'required'. Change events may now result in a 'fullDocumentBeforeChange' response field. - - `resume_after` (optional): A resume token. If provided, the + :param resume_after: A resume token. If provided, the change stream will start returning changes that occur directly after the operation specified in the resume token. A resume token is the _id value of a change document. - - `max_await_time_ms` (optional): The maximum time in milliseconds + :param max_await_time_ms: The maximum time in milliseconds for the server to wait for changes before responding to a getMore operation. - - `batch_size` (optional): The maximum number of documents to return + :param batch_size: The maximum number of documents to return per batch. - - `collation` (optional): The :class:`~pymongo.collation.Collation` + :param collation: The :class:`~pymongo.collation.Collation` to use for the aggregation. - - `start_at_operation_time` (optional): If provided, the resulting + :param start_at_operation_time: If provided, the resulting change stream will only return changes that occurred at or after the specified :class:`~bson.timestamp.Timestamp`. Requires MongoDB >= 4.0. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `start_after` (optional): The same as `resume_after` except that + :param start_after: The same as `resume_after` except that `start_after` can resume notifications after an invalidate event. This option and `resume_after` are mutually exclusive. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. + :param show_expanded_events: Include expanded events such as DDL events like `dropIndexes`. - :Returns: - A :class:`~pymongo.change_stream.DatabaseChangeStream` cursor. + :return: A :class:`~pymongo.change_stream.DatabaseChangeStream` cursor. .. versionchanged:: 4.3 Added `show_expanded_events` parameter. @@ -825,8 +815,7 @@ def command( >>> db.command("filemd5", object_id, root=file_root) - :Parameters: - - `command`: document representing the command to be issued, + :param command: document representing the command to be issued, or the name of the command (for simple commands only). .. note:: the order of keys in the `command` document is @@ -835,25 +824,25 @@ def command( should use an instance of :class:`~bson.son.SON` or a string and kwargs instead of a Python `dict`. - - `value` (optional): value to use for the command verb when + :param value: value to use for the command verb when `command` is passed as a string - - `check` (optional): check the response for errors, raising + :param check: check the response for errors, raising :class:`~pymongo.errors.OperationFailure` if there are any - - `allowable_errors`: if `check` is ``True``, error messages + :param allowable_errors: if `check` is ``True``, error messages in this list will be ignored by error-checking - - `read_preference` (optional): The read preference for this + :param read_preference: The read preference for this operation. 
See :mod:`~pymongo.read_preferences` for options. If the provided `session` is in a transaction, defaults to the read preference configured for the transaction. Otherwise, defaults to :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. - - `codec_options`: A :class:`~bson.codec_options.CodecOptions` + :param codec_options: A :class:`~bson.codec_options.CodecOptions` instance. - - `session` (optional): A + :param session: A :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): additional keyword arguments will + :param kwargs: additional keyword arguments will be added to the command document before it is sent @@ -924,8 +913,7 @@ def cursor_command( Otherwise, behaves identically to issuing a normal MongoDB command. - :Parameters: - - `command`: document representing the command to be issued, + :param command: document representing the command to be issued, or the name of the command (for simple commands only). .. note:: the order of keys in the `command` document is significant (the "verb" must come first), so commands which should use an instance of :class:`~bson.son.SON` or a string and kwargs instead of a Python `dict`. - - `value` (optional): value to use for the command verb when - `command` is passed as a string - - `read_preference` (optional): The read preference for this - operation. See :mod:`~pymongo.read_preferences` for options. - If the provided `session` is in a transaction, defaults to the - read preference configured for the transaction. - Otherwise, defaults to - :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. - - `codec_options`: A :class:`~bson.codec_options.CodecOptions` - instance. - - `session` (optional): A - :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to future getMores for this - command. - - `max_await_time_ms` (optional): The number of ms to wait for more data on future getMores for this command. - - `**kwargs` (optional): additional keyword arguments will - be added to the command document before it is sent + :param value: value to use for the command verb when + `command` is passed as a string + :param read_preference: The read preference for this + operation. See :mod:`~pymongo.read_preferences` for options. + If the provided `session` is in a transaction, defaults to the + read preference configured for the transaction. + Otherwise, defaults to + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. + :param codec_options: A :class:`~bson.codec_options.CodecOptions` + instance. + :param session: A + :class:`~pymongo.client_session.ClientSession`. + :param comment: A user-provided comment to attach to future getMores for this + command. + :param max_await_time_ms: The number of ms to wait for more data on future getMores for this command. + :param kwargs: additional keyword arguments will + be added to the command document before it is sent .. note:: :meth:`command` does **not** obey this Database's :attr:`read_preference` or :attr:`codec_options`. You must use the @@ -1066,22 +1054,20 @@ def list_collections( ) -> CommandCursor[MutableMapping[str, Any]]: """Get a cursor over the collections of this database. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`.
- - `filter` (optional): A query document to filter the list of + :param filter: A query document to filter the list of collections returned from the listCollections command. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): Optional parameters of the + :param kwargs: Optional parameters of the `listCollections command `_ can be passed as keyword arguments to this method. The supported options differ by server version. - :Returns: - An instance of :class:`~pymongo.command_cursor.CommandCursor`. + :return: An instance of :class:`~pymongo.command_cursor.CommandCursor`. .. versionadded:: 3.6 """ @@ -1115,14 +1101,13 @@ def list_collection_names( filter = {"name": {"$regex": r"^(?!system\\.)"}} db.list_collection_names(filter=filter) - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `filter` (optional): A query document to filter the list of + :param filter: A query document to filter the list of collections returned from the listCollections command. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): Optional parameters of the + :param kwargs: Optional parameters of the `listCollections command `_ can be passed as keyword arguments to this method. The supported @@ -1176,14 +1161,13 @@ def drop_collection( ) -> dict[str, Any]: """Drop a collection. - :Parameters: - - `name_or_collection`: the name of a collection to drop or the + :param name_or_collection: the name of a collection to drop or the collection object itself - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `encrypted_fields`: **(BETA)** Document that describes the encrypted fields for + :param encrypted_fields: **(BETA)** Document that describes the encrypted fields for Queryable Encryption. For example:: { @@ -1261,20 +1245,19 @@ def validate_collection( See also the MongoDB documentation on the `validate command`_. - :Parameters: - - `name_or_collection`: A Collection object or the name of a + :param name_or_collection: A Collection object or the name of a collection to validate. - - `scandata`: Do extra checks beyond checking the overall + :param scandata: Do extra checks beyond checking the overall structure of the collection. - - `full`: Have the server do a more thorough scan of the + :param full: Have the server do a more thorough scan of the collection. Use with `scandata` for a thorough scan of the structure of the collection and the individual documents. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `background` (optional): A boolean flag that determines whether + :param background: A boolean flag that determines whether the command runs in the background. Requires MongoDB 4.4+. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. versionchanged:: 4.1 @@ -1359,13 +1342,12 @@ def dereference( :class:`ValueError` if `dbref` has a database specified that is different from the current database. 
- :Parameters: - - `dbref`: the reference - - `session` (optional): a + :param dbref: the reference + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): any additional keyword arguments + :param kwargs: any additional keyword arguments are the same as the arguments to :meth:`~pymongo.collection.Collection.find`. diff --git a/pymongo/encryption.py b/pymongo/encryption.py index 0d4966a4a8..25e9960f78 100644 --- a/pymongo/encryption.py +++ b/pymongo/encryption.py @@ -135,11 +135,9 @@ def __init__( def kms_request(self, kms_context: MongoCryptKmsContext) -> None: """Complete a KMS request. - :Parameters: - - `kms_context`: A :class:`MongoCryptKmsContext`. + :param kms_context: A :class:`MongoCryptKmsContext`. - :Returns: - None + :return: None """ endpoint = kms_context.endpoint message = kms_context.message @@ -194,12 +192,10 @@ def collection_info( The returned collection info is passed to libmongocrypt which reads the JSON schema. - :Parameters: - - `database`: The database on which to run listCollections. - - `filter`: The filter to pass to listCollections. + :param database: The database on which to run listCollections. + :param filter: The filter to pass to listCollections. - :Returns: - The first document from the listCollections command response as BSON. + :return: The first document from the listCollections command response as BSON. """ with self.client_ref()[database].list_collections(filter=RawBSONDocument(filter)) as cursor: for doc in cursor: @@ -220,12 +216,10 @@ def spawn(self) -> None: def mark_command(self, database: str, cmd: bytes) -> bytes: """Mark a command for encryption. - :Parameters: - - `database`: The database on which to run this command. - - `cmd`: The BSON command to run. + :param database: The database on which to run this command. + :param cmd: The BSON command to run. - :Returns: - The marked command response from mongocryptd. + :return: The marked command response from mongocryptd. """ if not self._spawned and not self.opts._mongocryptd_bypass_spawn: self.spawn() @@ -249,11 +243,9 @@ def mark_command(self, database: str, cmd: bytes) -> bytes: def fetch_keys(self, filter: bytes) -> Iterator[bytes]: """Yields one or more keys from the key vault. - :Parameters: - - `filter`: The filter to pass to find. + :param filter: The filter to pass to find. - :Returns: - A generator which yields the requested keys from the key vault. + :return: A generator which yields the requested keys from the key vault. """ assert self.key_vault_coll is not None with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor: @@ -263,11 +255,9 @@ def fetch_keys(self, filter: bytes) -> Iterator[bytes]: def insert_data_key(self, data_key: bytes) -> Binary: """Insert a data key into the key vault. - :Parameters: - - `data_key`: The data key document to insert. + :param data_key: The data key document to insert. - :Returns: - The _id of the inserted data key document. + :return: The _id of the inserted data key document. """ raw_doc = RawBSONDocument(data_key, _KEY_VAULT_OPTS) data_key_id = raw_doc.get("_id") @@ -283,11 +273,9 @@ def bson_encode(self, doc: MutableMapping[str, Any]) -> bytes: A document can be any mapping type (like :class:`dict`). - :Parameters: - - `doc`: mapping type representing a document + :param doc: mapping type representing a document - :Returns: - The encoded BSON bytes. 
+ :return: The encoded BSON bytes. """ return encode(doc) @@ -336,9 +324,8 @@ class _Encrypter: def __init__(self, client: MongoClient[_DocumentTypeArg], opts: AutoEncryptionOpts): """Create a _Encrypter for a client. - :Parameters: - - `client`: The encrypted MongoClient. - - `opts`: The encrypted client's :class:`AutoEncryptionOpts`. + :param client: The encrypted MongoClient. + :param opts: The encrypted client's :class:`AutoEncryptionOpts`. """ if opts._schema_map is None: schema_map = None @@ -404,13 +391,11 @@ def encrypt( ) -> MutableMapping[str, Any]: """Encrypt a MongoDB command. - :Parameters: - - `database`: The database for this command. - - `cmd`: A command document. - - `codec_options`: The CodecOptions to use while encoding `cmd`. + :param database: The database for this command. + :param cmd: A command document. + :param codec_options: The CodecOptions to use while encoding `cmd`. - :Returns: - The encrypted command to execute. + :return: The encrypted command to execute. """ self._check_closed() encoded_cmd = _dict_to_bson(cmd, False, codec_options) @@ -422,11 +407,9 @@ def encrypt( def decrypt(self, response: bytes) -> Optional[bytes]: """Decrypt a MongoDB command response. - :Parameters: - - `response`: A MongoDB command response as BSON. + :param response: A MongoDB command response as BSON. - :Returns: - The decrypted command response. + :return: The decrypted command response. """ self._check_closed() with _wrap_encryption_errors(): @@ -513,8 +496,7 @@ def __init__( See :ref:`explicit-client-side-encryption` for an example. - :Parameters: - - `kms_providers`: Map of KMS provider options. The `kms_providers` + :param kms_providers: Map of KMS provider options. The `kms_providers` map values differ by provider: - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings. @@ -539,20 +521,20 @@ def __init__( data keys. This key should be generated and stored as securely as possible. - - `key_vault_namespace`: The namespace for the key vault collection. + :param key_vault_namespace: The namespace for the key vault collection. The key vault collection contains all data keys used for encryption and decryption. Data keys are stored as documents in this MongoDB collection. Data keys are protected with encryption by a KMS provider. - - `key_vault_client`: A MongoClient connected to a MongoDB cluster + :param key_vault_client: A MongoClient connected to a MongoDB cluster containing the `key_vault_namespace` collection. - - `codec_options`: An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions` to use when encoding a value for encryption and decoding the decrypted BSON value. This should be the same CodecOptions instance configured on the MongoClient, Database, or Collection used to access application data. - - `kms_tls_options` (optional): A map of KMS provider names to TLS + :param kms_tls_options: A map of KMS provider names to TLS options to use when creating secure connections to KMS providers. Accepts the same TLS options as :class:`pymongo.mongo_client.MongoClient`. For example, to @@ -621,10 +603,11 @@ def create_encrypted_collection( creation. :class:`~pymongo.errors.EncryptionError` will be raised if the collection already exists. - :Parameters: - - `name`: the name of the collection to create - - `encrypted_fields` (dict): Document that describes the encrypted fields for - Queryable Encryption. 
For example:: + :param name: the name of the collection to create + :param encrypted_fields: Document that describes the encrypted fields for + Queryable Encryption. The "keyId" may be set to ``None`` to auto-generate the data keys. For example: + + .. code-block: python { "escCollection": "enxcol_.encryptedCollection.esc", @@ -644,19 +627,17 @@ def create_encrypted_collection( ] } - The "keyId" may be set to ``None`` to auto-generate the data keys. - - `kms_provider` (optional): the KMS provider to be used - - `master_key` (optional): Identifies a KMS-specific key used to encrypt the + :param kms_provider: the KMS provider to be used + :param master_key: Identifies a KMS-specific key used to encrypt the new data key. If the kmsProvider is "local" the `master_key` is not applicable and may be omitted. - - `**kwargs` (optional): additional keyword arguments are the same as "create_collection". + :param kwargs: additional keyword arguments are the same as "create_collection". All optional `create collection command`_ parameters should be passed as keyword arguments to this method. See the documentation for :meth:`~pymongo.database.Database.create_collection` for all valid options. - :Raises: - - :class:`~pymongo.errors.EncryptedCollectionError`: When either data-key creation or creating the collection fails. + :raises: - :class:`~pymongo.errors.EncryptedCollectionError`: When either data-key creation or creating the collection fails. .. versionadded:: 4.4 @@ -693,10 +674,9 @@ def create_data_key( ) -> Binary: """Create and insert a new data key into the key vault collection. - :Parameters: - - `kms_provider`: The KMS provider to use. Supported values are + :param kms_provider: The KMS provider to use. Supported values are "aws", "azure", "gcp", "kmip", and "local". - - `master_key`: Identifies a KMS-specific key used to encrypt the + :param master_key: Identifies a KMS-specific key used to encrypt the new data key. If the kmsProvider is "local" the `master_key` is not applicable and may be omitted. @@ -740,7 +720,7 @@ def create_data_key( - `endpoint` (string): Optional. Host with optional port, e.g. "example.vault.azure.net:". - - `key_alt_names` (optional): An optional list of string alternate + :param key_alt_names: An optional list of string alternate names used to reference a key. If a key is created with alternate names, then encryption may refer to the key by the unique alternate name instead of by ``key_id``. The following example shows creating @@ -750,11 +730,10 @@ def create_data_key( # reference the key with the alternate name client_encryption.encrypt("457-55-5462", key_alt_name="name1", algorithm=Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Random) - - `key_material` (optional): Sets the custom key material to be used + :param key_material: Sets the custom key material to be used by the data key for encryption and decryption. - :Returns: - The ``_id`` of the created data key document as a + :return: The ``_id`` of the created data key document as a :class:`~bson.binary.Binary` with subtype :data:`~bson.binary.UUID_SUBTYPE`. @@ -828,23 +807,21 @@ def encrypt( Note that exactly one of ``key_id`` or ``key_alt_name`` must be provided. - :Parameters: - - `value`: The BSON value to encrypt. - - `algorithm` (string): The encryption algorithm to use. See + :param value: The BSON value to encrypt. + :param algorithm` (string): The encryption algorithm to use. See :class:`Algorithm` for some valid options. 
- - `key_id`: Identifies a data key by ``_id`` which must be a + :param key_id: Identifies a data key by ``_id`` which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - - `key_alt_name`: Identifies a key vault document by 'keyAltName'. - - `query_type` (str): The query type to execute. See :class:`QueryType` for valid options. - - `contention_factor` (int): The contention factor to use + :param key_alt_name: Identifies a key vault document by 'keyAltName'. + :param query_type: The query type to execute. See :class:`QueryType` for valid options. + :param contention_factor: The contention factor to use when the algorithm is :attr:`Algorithm.INDEXED`. An integer value *must* be given when the :attr:`Algorithm.INDEXED` algorithm is used. - - `range_opts`: Experimental only, not intended for public use. + :param range_opts: Experimental only, not intended for public use. - :Returns: - The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. + :return: The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. .. versionchanged:: 4.2 Added the `query_type` and `contention_factor` parameters. @@ -878,24 +855,22 @@ def encrypt_expression( Note that exactly one of ``key_id`` or ``key_alt_name`` must be provided. - :Parameters: - - `expression`: The BSON aggregate or match expression to encrypt. - - `algorithm` (string): The encryption algorithm to use. See + :param expression: The BSON aggregate or match expression to encrypt. + :param algorithm: The encryption algorithm to use. See :class:`Algorithm` for some valid options. - - `key_id`: Identifies a data key by ``_id`` which must be a + :param key_id: Identifies a data key by ``_id`` which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - - `key_alt_name`: Identifies a key vault document by 'keyAltName'. - - `query_type` (str): The query type to execute. See + :param key_alt_name: Identifies a key vault document by 'keyAltName'. + :param query_type: The query type to execute. See :class:`QueryType` for valid options. - - `contention_factor` (int): The contention factor to use + :param contention_factor: The contention factor to use when the algorithm is :attr:`Algorithm.INDEXED`. An integer value *must* be given when the :attr:`Algorithm.INDEXED` algorithm is used. - - `range_opts`: Experimental only, not intended for public use. + :param range_opts: Experimental only, not intended for public use. - :Returns: - The encrypted expression, a :class:`~bson.RawBSONDocument`. + :return: The encrypted expression, a :class:`~bson.RawBSONDocument`. .. versionadded:: 4.4 """ @@ -916,12 +891,10 @@ def encrypt_expression( def decrypt(self, value: Binary) -> Any: """Decrypt an encrypted value. - :Parameters: - - `value` (Binary): The encrypted value, a + :param value: The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. - :Returns: - The decrypted BSON value. + :return: The decrypted BSON value. """ self._check_closed() if not (isinstance(value, Binary) and value.subtype == 6): @@ -935,13 +908,11 @@ def decrypt(self, value: Binary) -> Any: def get_key(self, id: Binary) -> Optional[RawBSONDocument]: """Get a data key by id. - :Parameters: - - `id` (Binary): The UUID of a key a which must be a + :param id: The UUID of a key which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - :Returns: - The key document. + :return: The key document.
.. versionadded:: 4.2 """ @@ -952,8 +923,7 @@ def get_key(self, id: Binary) -> Optional[RawBSONDocument]: def get_keys(self) -> Cursor[RawBSONDocument]: """Get all of the data keys. - :Returns: - An instance of :class:`~pymongo.cursor.Cursor` over the data key + :return: An instance of :class:`~pymongo.cursor.Cursor` over the data key documents. .. versionadded:: 4.2 @@ -965,13 +935,11 @@ def get_keys(self) -> Cursor[RawBSONDocument]: def delete_key(self, id: Binary) -> DeleteResult: """Delete a key document in the key vault collection that has the given ``key_id``. - :Parameters: - - `id` (Binary): The UUID of a key a which must be a + :param id: The UUID of a key which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - :Returns: - The delete result. + :return: The delete result. .. versionadded:: 4.2 """ @@ -982,14 +950,12 @@ def delete_key(self, id: Binary) -> DeleteResult: def add_key_alt_name(self, id: Binary, key_alt_name: str) -> Any: """Add ``key_alt_name`` to the set of alternate names in the key document with UUID ``key_id``. - :Parameters: - - ``id``: The UUID of a key a which must be a + :param id: The UUID of a key which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - - ``key_alt_name``: The key alternate name to add. + :param key_alt_name: The key alternate name to add. - :Returns: - The previous version of the key document. + :return: The previous version of the key document. .. versionadded:: 4.2 """ @@ -1001,11 +967,9 @@ def add_key_alt_name(self, id: Binary, key_alt_name: str) -> Any: def get_key_by_alt_name(self, key_alt_name: str) -> Optional[RawBSONDocument]: """Get a key document in the key vault collection that has the given ``key_alt_name``. - :Parameters: - - `key_alt_name`: (str): The key alternate name of the key to get. + :param key_alt_name: The key alternate name of the key to get. - :Returns: - The key document. + :return: The key document. .. versionadded:: 4.2 """ @@ -1018,14 +982,12 @@ def remove_key_alt_name(self, id: Binary, key_alt_name: str) -> Optional[RawBSON Also removes the ``keyAltNames`` field from the key document if it would otherwise be empty. - :Parameters: - - ``id``: The UUID of a key a which must be a + :param id: The UUID of a key which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - - ``key_alt_name``: The key alternate name to remove. + :param key_alt_name: The key alternate name to remove. - :Returns: - Returns the previous version of the key document. + :return: The previous version of the key document. .. versionadded:: 4.2 """ @@ -1059,15 +1021,13 @@ def rewrap_many_data_key( ) -> RewrapManyDataKeyResult: """Decrypts and encrypts all matching data keys in the key vault with a possibly new `master_key` value. - :Parameters: - - `filter`: A document used to filter the data keys. - - `provider`: The new KMS provider to use to encrypt the data keys, + :param filter: A document used to filter the data keys. + :param provider: The new KMS provider to use to encrypt the data keys, or ``None`` to use the current KMS provider(s). - - ``master_key``: The master key fields corresponding to the new KMS + :param master_key: The master key fields corresponding to the new KMS provider when ``provider`` is not ``None``. - :Returns: - A :class:`RewrapManyDataKeyResult`. + :return: A :class:`RewrapManyDataKeyResult`.
This method allows you to re-encrypt all of your data-keys with a new CMK, or master key. Note that this does *not* require re-encrypting any of the data in your encrypted collections, diff --git a/pymongo/encryption_options.py b/pymongo/encryption_options.py index 36b366cafe..f5265c5b2e 100644 --- a/pymongo/encryption_options.py +++ b/pymongo/encryption_options.py @@ -69,8 +69,7 @@ def __init__( See :ref:`automatic-client-side-encryption` for an example. - :Parameters: - - `kms_providers`: Map of KMS provider options. The `kms_providers` + :param kms_providers: Map of KMS provider options. The `kms_providers` map values differ by provider: - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings. @@ -95,16 +94,16 @@ def __init__( data keys. This key should be generated and stored as securely as possible. - - `key_vault_namespace`: The namespace for the key vault collection. + :param key_vault_namespace: The namespace for the key vault collection. The key vault collection contains all data keys used for encryption and decryption. Data keys are stored as documents in this MongoDB collection. Data keys are protected with encryption by a KMS provider. - - `key_vault_client` (optional): By default the key vault collection + :param key_vault_client: By default the key vault collection is assumed to reside in the same MongoDB cluster as the encrypted MongoClient. Use this option to route data key queries to a separate MongoDB cluster. - - `schema_map` (optional): Map of collection namespace ("db.coll") to + :param schema_map: Map of collection namespace ("db.coll") to JSON Schema. By default, a collection's JSONSchema is periodically polled with the listCollections command. But a JSONSchema may be specified locally with the schemaMap option. @@ -119,24 +118,24 @@ def __init__( automatic encryption for client side encryption. Other validation rules in the JSON schema will not be enforced by the driver and will result in an error. - - `bypass_auto_encryption` (optional): If ``True``, automatic + :param bypass_auto_encryption: If ``True``, automatic encryption will be disabled but automatic decryption will still be enabled. Defaults to ``False``. - - `mongocryptd_uri` (optional): The MongoDB URI used to connect + :param mongocryptd_uri: The MongoDB URI used to connect to the *local* mongocryptd process. Defaults to ``'mongodb://localhost:27020'``. - - `mongocryptd_bypass_spawn` (optional): If ``True``, the encrypted + :param mongocryptd_bypass_spawn: If ``True``, the encrypted MongoClient will not attempt to spawn the mongocryptd process. Defaults to ``False``. - - `mongocryptd_spawn_path` (optional): Used for spawning the + :param mongocryptd_spawn_path: Used for spawning the mongocryptd process. Defaults to ``'mongocryptd'`` and spawns mongocryptd from the system path. - - `mongocryptd_spawn_args` (optional): A list of string arguments to + :param mongocryptd_spawn_args: A list of string arguments to use when spawning the mongocryptd process. Defaults to ``['--idleShutdownTimeoutSecs=60']``. If the list does not include the ``idleShutdownTimeoutSecs`` option then ``'--idleShutdownTimeoutSecs=60'`` will be added. - - `kms_tls_options` (optional): A map of KMS provider names to TLS + :param kms_tls_options: A map of KMS provider names to TLS options to use when creating secure connections to KMS providers. Accepts the same TLS options as :class:`pymongo.mongo_client.MongoClient`. 
For example, to @@ -147,14 +146,14 @@ def __init__( Or to supply a client certificate:: kms_tls_options={'kmip': {'tlsCertificateKeyFile': 'client.pem'}} - - `crypt_shared_lib_path` (optional): Override the path to load the crypt_shared library. - - `crypt_shared_lib_required` (optional): If True, raise an error if libmongocrypt is + :param crypt_shared_lib_path: Override the path to load the crypt_shared library. + :param crypt_shared_lib_required: If True, raise an error if libmongocrypt is unable to load the crypt_shared library. - - `bypass_query_analysis` (optional): If ``True``, disable automatic analysis + :param bypass_query_analysis: If ``True``, disable automatic analysis of outgoing commands. Set `bypass_query_analysis` to use explicit encryption on indexed fields without the MongoDB Enterprise Advanced licensed crypt_shared library. - - `encrypted_fields_map`: Map of collection namespace ("db.coll") to documents + :param encrypted_fields_map: Map of collection namespace ("db.coll") to documents that described the encrypted fields for Queryable Encryption. For example:: { @@ -232,11 +231,10 @@ def __init__( .. note:: This feature is experimental only, and not intended for public use. - :Parameters: - - `sparsity`: An integer. - - `min`: A BSON scalar value corresponding to the type being queried. - - `max`: A BSON scalar value corresponding to the type being queried. - - `precision`: An integer, may only be set for double or decimal128 types. + :param sparsity: An integer. + :param min: A BSON scalar value corresponding to the type being queried. + :param max: A BSON scalar value corresponding to the type being queried. + :param precision: An integer, may only be set for double or decimal128 types. .. versionadded:: 4.4 """ diff --git a/pymongo/message.py b/pymongo/message.py index c04f4a8874..b59b88ab29 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -1475,8 +1475,7 @@ def raw_response( Can raise CursorNotFound, NotPrimaryError, ExecutionTimeout, or OperationFailure. - :Parameters: - - `cursor_id` (optional): cursor_id we sent to get this response - + :param cursor_id: cursor_id we sent to get this response - used for raising an informative exception when we get cursor id not valid at server response. """ @@ -1525,13 +1524,12 @@ def unpack_response( Can raise CursorNotFound, NotPrimaryError, ExecutionTimeout, or OperationFailure. - :Parameters: - - `cursor_id` (optional): cursor_id we sent to get this response - + :param cursor_id: cursor_id we sent to get this response - used for raising an informative exception when we get cursor id not valid at server response - - `codec_options` (optional): an instance of + :param codec_options: an instance of :class:`~bson.codec_options.CodecOptions` - - `user_fields` (optional): Response fields that should be decoded + :param user_fields: Response fields that should be decoded using the TypeDecoders from codec_options, passed to bson._decode_all_selective. """ @@ -1606,11 +1604,10 @@ def unpack_response( ) -> list[dict[str, Any]]: """Unpack a OP_MSG command response. - :Parameters: - - `cursor_id` (optional): Ignored, for compatibility with _OpReply. - - `codec_options` (optional): an instance of + :param cursor_id: Ignored, for compatibility with _OpReply. 
+ :param codec_options: an instance of :class:`~bson.codec_options.CodecOptions` - - `user_fields` (optional): Response fields that should be decoded + :param user_fields: Response fields that should be decoded using the TypeDecoders from codec_options, passed to bson._decode_all_selective. """ diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index e8af251597..bccacdf7ea 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -246,28 +246,27 @@ def __init__( URI or keyword parameters. If the same option is passed in a URI and as a keyword parameter the keyword parameter takes precedence. - :Parameters: - - `host` (optional): hostname or IP address or Unix domain socket + :param host: hostname or IP address or Unix domain socket path of a single mongod or mongos instance to connect to, or a mongodb URI, or a list of hostnames (but no more than one mongodb URI). If `host` is an IPv6 literal it must be enclosed in '[' and ']' characters following the RFC2732 URL syntax (e.g. '[::1]' for localhost). Multihomed and round robin DNS addresses are **not** supported. - - `port` (optional): port number on which to connect - - `document_class` (optional): default class to use for + :param port: port number on which to connect + :param document_class: default class to use for documents returned from queries on this client - - `tz_aware` (optional): if ``True``, + :param tz_aware: if ``True``, :class:`~datetime.datetime` instances returned as values in a document by this :class:`MongoClient` will be timezone aware (otherwise they will be naive) - - `connect` (optional): if ``True`` (the default), immediately + :param connect: if ``True`` (the default), immediately begin connecting to MongoDB in the background. Otherwise connect on the first operation. - - `type_registry` (optional): instance of + :param type_registry: instance of :class:`~bson.codec_options.TypeRegistry` to enable encoding and decoding of custom types. - - `datetime_conversion`: Specifies how UTC datetimes should be decoded + :param datetime_conversion: Specifies how UTC datetimes should be decoded within BSON. Valid options include 'datetime_ms' to return as a DatetimeMS, 'datetime' to return as a datetime.datetime and raising a ValueError for out-of-range values, 'datetime_auto' to @@ -971,47 +970,45 @@ def watch( For a precise description of the resume process see the `change streams specification`_. - :Parameters: - - `pipeline` (optional): A list of aggregation pipeline stages to + :param pipeline: A list of aggregation pipeline stages to append to an initial ``$changeStream`` stage. Not all pipeline stages are valid after a ``$changeStream`` stage, see the MongoDB documentation on change streams for the supported stages. - - `full_document` (optional): The fullDocument to pass as an option + :param full_document: The fullDocument to pass as an option to the ``$changeStream`` stage. Allowed values: 'updateLookup', 'whenAvailable', 'required'. When set to 'updateLookup', the change notification for partial updates will include both a delta describing the changes to the document, as well as a copy of the entire document that was changed from some time after the change occurred. - - `full_document_before_change`: Allowed values: 'whenAvailable' + :param full_document_before_change: Allowed values: 'whenAvailable' and 'required'. Change events may now result in a 'fullDocumentBeforeChange' response field. - - `resume_after` (optional): A resume token. If provided, the + :param resume_after: A resume token. 
If provided, the change stream will start returning changes that occur directly after the operation specified in the resume token. A resume token is the _id value of a change document. - - `max_await_time_ms` (optional): The maximum time in milliseconds + :param max_await_time_ms: The maximum time in milliseconds for the server to wait for changes before responding to a getMore operation. - - `batch_size` (optional): The maximum number of documents to return + :param batch_size: The maximum number of documents to return per batch. - - `collation` (optional): The :class:`~pymongo.collation.Collation` + :param collation: The :class:`~pymongo.collation.Collation` to use for the aggregation. - - `start_at_operation_time` (optional): If provided, the resulting + :param start_at_operation_time: If provided, the resulting change stream will only return changes that occurred at or after the specified :class:`~bson.timestamp.Timestamp`. Requires MongoDB >= 4.0. - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `start_after` (optional): The same as `resume_after` except that + :param start_after: The same as `resume_after` except that `start_after` can resume notifications after an invalidate event. This option and `resume_after` are mutually exclusive. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. + :param show_expanded_events: Include expanded events such as DDL events like `dropIndexes`. - :Returns: - A :class:`~pymongo.change_stream.ClusterChangeStream` cursor. + :return: A :class:`~pymongo.change_stream.ClusterChangeStream` cursor. .. versionchanged:: 4.3 Added `show_expanded_events` parameter. @@ -1062,8 +1059,7 @@ def topology_description(self) -> TopologyDescription: to get a more recent :class:`~pymongo.topology_description.TopologyDescription`. - :Returns: - An instance of + :return: An instance of :class:`~pymongo.topology_description.TopologyDescription`. .. versionadded:: 4.0 @@ -1175,8 +1171,7 @@ def nodes(self) -> FrozenSet[_Address]: def options(self) -> ClientOptions: """The configuration options for this client. - :Returns: - An instance of :class:`~pymongo.client_options.ClientOptions`. + :return: An instance of :class:`~pymongo.client_options.ClientOptions`. .. versionadded:: 4.0 """ @@ -1281,12 +1276,11 @@ def _select_server( ) -> Server: """Select a server to run an operation on this client. - :Parameters: - - `server_selector`: The server selector to use if the session is + :param server_selector: The server selector to use if the session is not pinned and no address is given. - - `session`: The ClientSession for the next operation, or None. May + :param session: The ClientSession for the next operation, or None. May be pinned to a mongos server address. - - `address` (optional): Address when sending a message + :param address: Address when sending a message to a specific server, used for getMore. """ try: @@ -1361,10 +1355,9 @@ def _run_operation( ) -> Response: """Run a _Query/_GetMore operation and return a Response. - :Parameters: - - `operation`: a _Query or _GetMore object. - - `unpack_res`: A callable that decodes the wire protocol response. - - `address` (optional): Optional address when sending a message + :param operation: a _Query or _GetMore object. + :param unpack_res: A callable that decodes the wire protocol response. 
+ :param address: Optional address when sending a message to a specific server, used for getMore. """ if operation.conn_mgr: @@ -1441,17 +1434,15 @@ def _retry_internal( ) -> T: """Internal retryable helper for all client transactions. - :Parameters: - - `func`: Callback function we want to retry - - `session`: Client Session on which the transaction should occur - - `bulk`: Abstraction to handle bulk write operations - - `is_read`: If this is an exclusive read transaction, defaults to False - - `address`: Server Address, defaults to None - - `read_pref`: Topology of read operation, defaults to None - - `retryable`: If the operation should be retried once, defaults to None - - :Returns: - Output of the calling func() + :param func: Callback function we want to retry + :param session: Client Session on which the transaction should occur + :param bulk: Abstraction to handle bulk write operations + :param is_read: If this is an exclusive read transaction, defaults to False + :param address: Server Address, defaults to None + :param read_pref: Topology of read operation, defaults to None + :param retryable: If the operation should be retried once, defaults to None + + :return: Output of the calling func() """ return _ClientConnectionRetryable( mongo_client=self, @@ -1479,11 +1470,11 @@ def _retryable_read( Re-raises any exception thrown by func(). - - `func`: Read call we want to execute - - `read_pref`: Desired topology of read operation - - `session`: Client session we should use to execute operation - - `address`: Optional address when sending a message, defaults to None - - `retryable`: if we should attempt retries + :param func: Read call we want to execute + :param read_pref: Desired topology of read operation + :param session: Client session we should use to execute operation + :param address: Optional address when sending a message, defaults to None + :param retryable: if we should attempt retries (may not always be supported even if supplied), defaults to False """ @@ -1516,11 +1507,10 @@ def _retryable_write( Re-raises any exception thrown by func(). - :Parameters: - - `retryable`: if we should attempt retries (may not always be supported) - - `func`: write call we want to execute during a session - - `session`: Client session we will use to execute write operation - - `bulk`: bulk abstraction to execute operations in bulk, defaults to None + :param retryable: if we should attempt retries (may not always be supported) + :param func: write call we want to execute during a session + :param session: Client session we will use to execute write operation + :param bulk: bulk abstraction to execute operations in bulk, defaults to None """ with self._tmp_session(session) as s: return self._retry_with_session(retryable, func, s, bulk) @@ -1578,8 +1568,7 @@ def __getattr__(self, name: str) -> database.Database[_DocumentType]: Raises :class:`~pymongo.errors.InvalidName` if an invalid database name is used. - :Parameters: - - `name`: the name of the database to get + :param name: the name of the database to get """ if name.startswith("_"): raise AttributeError( @@ -1594,8 +1583,7 @@ def __getitem__(self, name: str) -> database.Database[_DocumentType]: Raises :class:`~pymongo.errors.InvalidName` if an invalid database name is used. 
- :Parameters: - - `name`: the name of the database to get + :param name: the name of the database to get """ return database.Database(self, name) @@ -1614,13 +1602,12 @@ def _cleanup_cursor( pinned connection or implicit session attached at the time the cursor was closed or garbage collected. - :Parameters: - - `locks_allowed`: True if we are allowed to acquire locks. - - `cursor_id`: The cursor id which may be 0. - - `address`: The _CursorAddress. - - `conn_mgr`: The _ConnectionManager for the pinned connection or None. - - `session`: The cursor's session. - - `explicit_session`: True if the session was passed explicitly. + :param locks_allowed: True if we are allowed to acquire locks. + :param cursor_id: The cursor id which may be 0. + :param address: The _CursorAddress. + :param conn_mgr: The _ConnectionManager for the pinned connection or None. + :param session: The cursor's session. + :param explicit_session: True if the session was passed explicitly. """ if locks_allowed: if cursor_id: @@ -1791,8 +1778,7 @@ def start_session( or process at a time. A single :class:`ClientSession` cannot be used to run multiple operations concurrently. - :Returns: - An instance of :class:`~pymongo.client_session.ClientSession`. + :return: An instance of :class:`~pymongo.client_session.ClientSession`. .. versionadded:: 3.6 """ @@ -1881,8 +1867,7 @@ def _process_response(self, reply: Mapping[str, Any], session: Optional[ClientSe def server_info(self, session: Optional[client_session.ClientSession] = None) -> dict[str, Any]: """Get information about the MongoDB server we're connected to. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. .. versionchanged:: 3.6 @@ -1903,20 +1888,18 @@ def list_databases( ) -> CommandCursor[dict[str, Any]]: """Get a cursor over the databases of the connected server. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. - - `**kwargs` (optional): Optional parameters of the + :param kwargs: Optional parameters of the `listDatabases command `_ can be passed as keyword arguments to this method. The supported options differ by server version. - :Returns: - An instance of :class:`~pymongo.command_cursor.CommandCursor`. + :return: An instance of :class:`~pymongo.command_cursor.CommandCursor`. .. versionadded:: 3.6 """ @@ -1941,10 +1924,9 @@ def list_database_names( ) -> list[str]: """Get a list of the names of all databases on the connected server. - :Parameters: - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. versionchanged:: 4.1 @@ -1966,13 +1948,12 @@ def drop_database( Raises :class:`TypeError` if `name_or_database` is not an instance of :class:`str` or :class:`~pymongo.database.Database`. - :Parameters: - - `name_or_database`: the name of a database to drop, or a + :param name_or_database: the name of a database to drop, or a :class:`~pymongo.database.Database` instance representing the database to drop - - `session` (optional): a + :param session: a :class:`~pymongo.client_session.ClientSession`. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. 
versionchanged:: 4.1 @@ -2026,26 +2007,25 @@ def get_default_database( Useful in scripts where you want to choose which database to use based only on the URI in a configuration file. - :Parameters: - - `default` (optional): the database name to use if no database name + :param default: the database name to use if no database name was provided in the URI. - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`MongoClient` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` for options. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`MongoClient` is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) the :attr:`read_concern` of this :class:`MongoClient` is used. - - `comment` (optional): A user-provided comment to attach to this + :param comment: A user-provided comment to attach to this command. .. versionchanged:: 4.1 @@ -2093,23 +2073,22 @@ def get_database( >>> db2.read_preference Secondary(tag_sets=None) - :Parameters: - - `name` (optional): The name of the database - a string. If ``None`` + :param name: The name of the database - a string. If ``None`` (the default) the database named in the MongoDB connection URI is returned. - - `codec_options` (optional): An instance of + :param codec_options: An instance of :class:`~bson.codec_options.CodecOptions`. If ``None`` (the default) the :attr:`codec_options` of this :class:`MongoClient` is used. - - `read_preference` (optional): The read preference to use. If + :param read_preference: The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` for options. - - `write_concern` (optional): An instance of + :param write_concern: An instance of :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the default) the :attr:`write_concern` of this :class:`MongoClient` is used. - - `read_concern` (optional): An instance of + :param read_concern: An instance of :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the default) the :attr:`read_concern` of this :class:`MongoClient` is used. @@ -2299,11 +2278,9 @@ def __init__( def run(self) -> T: """Runs the supplied func() and attempts a retry - :Raises: - self._last_error: Last exception raised + :raises: self._last_error: Last exception raised - :Returns: - Result of the func() call + :return: Result of the func() call """ # Increment the transaction id up front to ensure any retry attempt # will use the proper txnNumber, even if server or socket selection @@ -2389,8 +2366,7 @@ def _check_last_error(self, check_csot: bool = False) -> None: """Checks if the ongoing client exchange experienced a exception previously. 
If so, raise last error - :Parameters: - - `check_csot`: Checks CSOT to ensure we are retrying with time remaining defaults to False + :param check_csot: Checks CSOT to ensure we are retrying with time remaining, defaults to False """ if self._is_retrying(): remaining = _csot.remaining() @@ -2401,8 +2377,7 @@ def _check_last_error(self, check_csot: bool = False) -> None: def _get_server(self) -> Server: """Retrieves a server object based on provided object context - :Returns: - Abstraction to connect to server + :return: Abstraction to connect to server """ return self._client._select_server( self._server_selector, @@ -2414,8 +2389,7 @@ def _get_server(self) -> Server: def _write(self) -> T: """Wrapper method for write-type retryable client executions - :Returns: - Output for func()'s call + :return: Output for func()'s call """ try: max_wire_version = 0 @@ -2441,8 +2415,7 @@ def _write(self) -> T: def _read(self) -> T: """Wrapper method for read-type retryable client executions - :Returns: - Output for func()'s call + :return: Output for func()'s call """ self._server = self._get_server() assert self._read_pref is not None, "Read Preference required on read calls" diff --git a/pymongo/monitoring.py b/pymongo/monitoring.py index 03b3c53180..7bd512dada 100644 --- a/pymongo/monitoring.py +++ b/pymongo/monitoring.py @@ -229,24 +229,21 @@ class CommandListener(_EventListener): def started(self, event: CommandStartedEvent) -> None: """Abstract method to handle a `CommandStartedEvent`. - :Parameters: - - `event`: An instance of :class:`CommandStartedEvent`.
+ :param event: An instance of :class:`ConnectionCreatedEvent`. """ raise NotImplementedError @@ -325,8 +317,7 @@ def connection_ready(self, event: ConnectionReadyEvent) -> None: Emitted when a connection has finished its setup, and is now ready to use. - :Parameters: - - `event`: An instance of :class:`ConnectionReadyEvent`. + :param event: An instance of :class:`ConnectionReadyEvent`. """ raise NotImplementedError @@ -335,8 +326,7 @@ def connection_closed(self, event: ConnectionClosedEvent) -> None: Emitted when a connection Pool closes a connection. - :Parameters: - - `event`: An instance of :class:`ConnectionClosedEvent`. + :param event: An instance of :class:`ConnectionClosedEvent`. """ raise NotImplementedError @@ -345,8 +335,7 @@ def connection_check_out_started(self, event: ConnectionCheckOutStartedEvent) -> Emitted when the driver starts attempting to check out a connection. - :Parameters: - - `event`: An instance of :class:`ConnectionCheckOutStartedEvent`. + :param event: An instance of :class:`ConnectionCheckOutStartedEvent`. """ raise NotImplementedError @@ -355,8 +344,7 @@ def connection_check_out_failed(self, event: ConnectionCheckOutFailedEvent) -> N Emitted when the driver's attempt to check out a connection fails. - :Parameters: - - `event`: An instance of :class:`ConnectionCheckOutFailedEvent`. + :param event: An instance of :class:`ConnectionCheckOutFailedEvent`. """ raise NotImplementedError @@ -365,8 +353,7 @@ def connection_checked_out(self, event: ConnectionCheckedOutEvent) -> None: Emitted when the driver successfully checks out a connection. - :Parameters: - - `event`: An instance of :class:`ConnectionCheckedOutEvent`. + :param event: An instance of :class:`ConnectionCheckedOutEvent`. """ raise NotImplementedError @@ -376,8 +363,7 @@ def connection_checked_in(self, event: ConnectionCheckedInEvent) -> None: Emitted when the driver checks in a connection back to the connection Pool. - :Parameters: - - `event`: An instance of :class:`ConnectionCheckedInEvent`. + :param event: An instance of :class:`ConnectionCheckedInEvent`. """ raise NotImplementedError @@ -394,24 +380,21 @@ class ServerHeartbeatListener(_EventListener): def started(self, event: ServerHeartbeatStartedEvent) -> None: """Abstract method to handle a `ServerHeartbeatStartedEvent`. - :Parameters: - - `event`: An instance of :class:`ServerHeartbeatStartedEvent`. + :param event: An instance of :class:`ServerHeartbeatStartedEvent`. """ raise NotImplementedError def succeeded(self, event: ServerHeartbeatSucceededEvent) -> None: """Abstract method to handle a `ServerHeartbeatSucceededEvent`. - :Parameters: - - `event`: An instance of :class:`ServerHeartbeatSucceededEvent`. + :param event: An instance of :class:`ServerHeartbeatSucceededEvent`. """ raise NotImplementedError def failed(self, event: ServerHeartbeatFailedEvent) -> None: """Abstract method to handle a `ServerHeartbeatFailedEvent`. - :Parameters: - - `event`: An instance of :class:`ServerHeartbeatFailedEvent`. + :param event: An instance of :class:`ServerHeartbeatFailedEvent`. """ raise NotImplementedError @@ -427,24 +410,21 @@ class TopologyListener(_EventListener): def opened(self, event: TopologyOpenedEvent) -> None: """Abstract method to handle a `TopologyOpenedEvent`. - :Parameters: - - `event`: An instance of :class:`TopologyOpenedEvent`. + :param event: An instance of :class:`TopologyOpenedEvent`. 
""" raise NotImplementedError def description_changed(self, event: TopologyDescriptionChangedEvent) -> None: """Abstract method to handle a `TopologyDescriptionChangedEvent`. - :Parameters: - - `event`: An instance of :class:`TopologyDescriptionChangedEvent`. + :param event: An instance of :class:`TopologyDescriptionChangedEvent`. """ raise NotImplementedError def closed(self, event: TopologyClosedEvent) -> None: """Abstract method to handle a `TopologyClosedEvent`. - :Parameters: - - `event`: An instance of :class:`TopologyClosedEvent`. + :param event: An instance of :class:`TopologyClosedEvent`. """ raise NotImplementedError @@ -460,24 +440,21 @@ class ServerListener(_EventListener): def opened(self, event: ServerOpeningEvent) -> None: """Abstract method to handle a `ServerOpeningEvent`. - :Parameters: - - `event`: An instance of :class:`ServerOpeningEvent`. + :param event: An instance of :class:`ServerOpeningEvent`. """ raise NotImplementedError def description_changed(self, event: ServerDescriptionChangedEvent) -> None: """Abstract method to handle a `ServerDescriptionChangedEvent`. - :Parameters: - - `event`: An instance of :class:`ServerDescriptionChangedEvent`. + :param event: An instance of :class:`ServerDescriptionChangedEvent`. """ raise NotImplementedError def closed(self, event: ServerClosedEvent) -> None: """Abstract method to handle a `ServerClosedEvent`. - :Parameters: - - `event`: An instance of :class:`ServerClosedEvent`. + :param event: An instance of :class:`ServerClosedEvent`. """ raise NotImplementedError @@ -507,8 +484,7 @@ def _validate_event_listeners( def register(listener: _EventListener) -> None: """Register a global event listener. - :Parameters: - - `listener`: A subclasses of :class:`CommandListener`, + :param listener: A subclasses of :class:`CommandListener`, :class:`ServerHeartbeatListener`, :class:`ServerListener`, :class:`TopologyListener`, or :class:`ConnectionPoolListener`. """ @@ -619,14 +595,13 @@ def database_name(self) -> str: class CommandStartedEvent(_CommandEvent): """Event published when a command starts. - :Parameters: - - `command`: The command document. - - `database_name`: The name of the database this command was run against. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this command + :param command: The command document. + :param database_name: The name of the database this command was run against. + :param request_id: The request id for this operation. + :param connection_id: The address (host, port) of the server this command was sent to. - - `operation_id`: An optional identifier for a series of related events. - - `service_id`: The service_id this command was sent to, or ``None``. + :param operation_id: An optional identifier for a series of related events. + :param service_id: The service_id this command was sent to, or ``None``. """ __slots__ = ("__cmd",) @@ -682,16 +657,15 @@ def __repr__(self) -> str: class CommandSucceededEvent(_CommandEvent): """Event published when a command succeeds. - :Parameters: - - `duration`: The command duration as a datetime.timedelta. - - `reply`: The server reply document. - - `command_name`: The command name. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this command + :param duration: The command duration as a datetime.timedelta. + :param reply: The server reply document. + :param command_name: The command name. + :param request_id: The request id for this operation. 
+ :param connection_id: The address (host, port) of the server this command was sent to. - - `operation_id`: An optional identifier for a series of related events. - - `service_id`: The service_id this command was sent to, or ``None``. - - `database_name`: The database this command was sent to, or ``""``. + :param operation_id: An optional identifier for a series of related events. + :param service_id: The service_id this command was sent to, or ``None``. + :param database_name: The database this command was sent to, or ``""``. """ __slots__ = ("__duration_micros", "__reply") @@ -749,16 +723,15 @@ def __repr__(self) -> str: class CommandFailedEvent(_CommandEvent): """Event published when a command fails. - :Parameters: - - `duration`: The command duration as a datetime.timedelta. - - `failure`: The server reply document. - - `command_name`: The command name. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this command + :param duration: The command duration as a datetime.timedelta. + :param failure: The server reply document. + :param command_name: The command name. + :param request_id: The request id for this operation. + :param connection_id: The address (host, port) of the server this command was sent to. - - `operation_id`: An optional identifier for a series of related events. - - `service_id`: The service_id this command was sent to, or ``None``. - - `database_name`: The database this command was sent to, or ``""``. + :param operation_id: An optional identifier for a series of related events. + :param service_id: The service_id this command was sent to, or ``None``. + :param database_name: The database this command was sent to, or ``""``. """ __slots__ = ("__duration_micros", "__failure") @@ -833,8 +806,7 @@ def __repr__(self) -> str: class PoolCreatedEvent(_PoolEvent): """Published when a Connection Pool is created. - :Parameters: - - `address`: The address (host, port) pair of the server this Pool is + :param address: The address (host, port) pair of the server this Pool is attempting to connect to. .. versionadded:: 3.9 @@ -858,8 +830,7 @@ def __repr__(self) -> str: class PoolReadyEvent(_PoolEvent): """Published when a Connection Pool is marked ready. - :Parameters: - - `address`: The address (host, port) pair of the server this Pool is + :param address: The address (host, port) pair of the server this Pool is attempting to connect to. .. versionadded:: 4.0 @@ -871,10 +842,9 @@ class PoolReadyEvent(_PoolEvent): class PoolClearedEvent(_PoolEvent): """Published when a Connection Pool is cleared. - :Parameters: - - `address`: The address (host, port) pair of the server this Pool is + :param address: The address (host, port) pair of the server this Pool is attempting to connect to. - - `service_id`: The service_id this command was sent to, or ``None``. + :param service_id: The service_id this command was sent to, or ``None``. .. versionadded:: 3.9 """ @@ -902,8 +872,7 @@ def __repr__(self) -> str: class PoolClosedEvent(_PoolEvent): """Published when a Connection Pool is closed. - :Parameters: - - `address`: The address (host, port) pair of the server this Pool is + :param address: The address (host, port) pair of the server this Pool is attempting to connect to. .. versionadded:: 3.9 @@ -995,10 +964,9 @@ class ConnectionCreatedEvent(_ConnectionIdEvent): NOTE: This connection is not ready for use until the :class:`ConnectionReadyEvent` is published. 
- :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `connection_id`: The integer ID of the Connection in this Pool. + :param connection_id: The integer ID of the Connection in this Pool. .. versionadded:: 3.9 """ @@ -1009,10 +977,9 @@ class ConnectionCreatedEvent(_ConnectionIdEvent): class ConnectionReadyEvent(_ConnectionIdEvent): """Published when a Connection has finished its setup, and is ready to use. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `connection_id`: The integer ID of the Connection in this Pool. + :param connection_id: The integer ID of the Connection in this Pool. .. versionadded:: 3.9 """ @@ -1023,11 +990,10 @@ class ConnectionReadyEvent(_ConnectionIdEvent): class ConnectionClosedEvent(_ConnectionIdEvent): """Published when a Connection is closed. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `connection_id`: The integer ID of the Connection in this Pool. - - `reason`: A reason explaining why this connection was closed. + :param connection_id: The integer ID of the Connection in this Pool. + :param reason: A reason explaining why this connection was closed. .. versionadded:: 3.9 """ @@ -1059,8 +1025,7 @@ def __repr__(self) -> str: class ConnectionCheckOutStartedEvent(_ConnectionEvent): """Published when the driver starts attempting to check out a connection. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. .. versionadded:: 3.9 @@ -1072,10 +1037,9 @@ class ConnectionCheckOutStartedEvent(_ConnectionEvent): class ConnectionCheckOutFailedEvent(_ConnectionEvent): """Published when the driver's attempt to check out a connection fails. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `reason`: A reason explaining why connection check out failed. + :param reason: A reason explaining why connection check out failed. .. versionadded:: 3.9 """ @@ -1102,10 +1066,9 @@ def __repr__(self) -> str: class ConnectionCheckedOutEvent(_ConnectionIdEvent): """Published when the driver successfully checks out a connection. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `connection_id`: The integer ID of the Connection in this Pool. + :param connection_id: The integer ID of the Connection in this Pool. .. versionadded:: 3.9 """ @@ -1116,10 +1079,9 @@ class ConnectionCheckedOutEvent(_ConnectionIdEvent): class ConnectionCheckedInEvent(_ConnectionIdEvent): """Published when the driver checks in a Connection into the Pool. - :Parameters: - - `address`: The address (host, port) pair of the server this + :param address: The address (host, port) pair of the server this Connection is attempting to connect to. - - `connection_id`: The integer ID of the Connection in this Pool. + :param connection_id: The integer ID of the Connection in this Pool. .. 
versionadded:: 3.9 """ @@ -1422,8 +1384,7 @@ class _EventListeners: Any event listeners registered globally are included by default. - :Parameters: - - `listeners`: A list of event listeners. + :param listeners: A list of event listeners. """ def __init__(self, listeners: Optional[Sequence[_EventListener]]): @@ -1497,15 +1458,14 @@ def publish_command_start( ) -> None: """Publish a CommandStartedEvent to all command listeners. - :Parameters: - - `command`: The command document. - - `database_name`: The name of the database this command was run + :param command: The command document. + :param database_name: The name of the database this command was run against. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this + :param request_id: The request id for this operation. + :param connection_id: The address (host, port) of the server this command was sent to. - - `op_id`: The (optional) operation id for this operation. - - `service_id`: The service_id this command was sent to, or ``None``. + :param op_id: The (optional) operation id for this operation. + :param service_id: The service_id this command was sent to, or ``None``. """ if op_id is None: op_id = request_id @@ -1532,17 +1492,16 @@ def publish_command_success( ) -> None: """Publish a CommandSucceededEvent to all command listeners. - :Parameters: - - `duration`: The command duration as a datetime.timedelta. - - `reply`: The server reply document. - - `command_name`: The command name. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this + :param duration: The command duration as a datetime.timedelta. + :param reply: The server reply document. + :param command_name: The command name. + :param request_id: The request id for this operation. + :param connection_id: The address (host, port) of the server this command was sent to. - - `op_id`: The (optional) operation id for this operation. - - `service_id`: The service_id this command was sent to, or ``None``. - - `speculative_hello`: Was the command sent with speculative auth? - - `database_name`: The database this command was sent to, or ``""``. + :param op_id: The (optional) operation id for this operation. + :param service_id: The service_id this command was sent to, or ``None``. + :param speculative_hello: Was the command sent with speculative auth? + :param database_name: The database this command was sent to, or ``""``. """ if op_id is None: op_id = request_id @@ -1579,17 +1538,16 @@ def publish_command_failure( ) -> None: """Publish a CommandFailedEvent to all command listeners. - :Parameters: - - `duration`: The command duration as a datetime.timedelta. - - `failure`: The server reply document or failure description + :param duration: The command duration as a datetime.timedelta. + :param failure: The server reply document or failure description document. - - `command_name`: The command name. - - `request_id`: The request id for this operation. - - `connection_id`: The address (host, port) of the server this + :param command_name: The command name. + :param request_id: The request id for this operation. + :param connection_id: The address (host, port) of the server this command was sent to. - - `op_id`: The (optional) operation id for this operation. - - `service_id`: The service_id this command was sent to, or ``None``. - - `database_name`: The database this command was sent to, or ``""``. + :param op_id: The (optional) operation id for this operation. 
+ :param service_id: The service_id this command was sent to, or ``None``. + :param database_name: The database this command was sent to, or ``""``. """ if op_id is None: op_id = request_id @@ -1613,9 +1571,8 @@ def publish_server_heartbeat_started(self, connection_id: _Address, awaited: boo """Publish a ServerHeartbeatStartedEvent to all server heartbeat listeners. - :Parameters: - - `connection_id`: The address (host, port) pair of the connection. - - `awaited`: True if this heartbeat is part of an awaitable hello command. + :param connection_id: The address (host, port) pair of the connection. + :param awaited: True if this heartbeat is part of an awaitable hello command. """ event = ServerHeartbeatStartedEvent(connection_id, awaited) for subscriber in self.__server_heartbeat_listeners: @@ -1630,12 +1587,11 @@ def publish_server_heartbeat_succeeded( """Publish a ServerHeartbeatSucceededEvent to all server heartbeat listeners. - :Parameters: - - `connection_id`: The address (host, port) pair of the connection. - - `duration`: The execution time of the event in the highest possible + :param connection_id: The address (host, port) pair of the connection. + :param duration: The execution time of the event in the highest possible resolution for the platform. - - `reply`: The command reply. - - `awaited`: True if the response was awaited. + :param reply: The command reply. + :param awaited: True if the response was awaited. """ event = ServerHeartbeatSucceededEvent(duration, reply, connection_id, awaited) for subscriber in self.__server_heartbeat_listeners: @@ -1650,12 +1606,11 @@ def publish_server_heartbeat_failed( """Publish a ServerHeartbeatFailedEvent to all server heartbeat listeners. - :Parameters: - - `connection_id`: The address (host, port) pair of the connection. - - `duration`: The execution time of the event in the highest possible + :param connection_id: The address (host, port) pair of the connection. + :param duration: The execution time of the event in the highest possible resolution for the platform. - - `reply`: The command reply. - - `awaited`: True if the response was awaited. + :param reply: The command reply. + :param awaited: True if the response was awaited. """ event = ServerHeartbeatFailedEvent(duration, reply, connection_id, awaited) for subscriber in self.__server_heartbeat_listeners: @@ -1667,9 +1622,8 @@ def publish_server_heartbeat_failed( def publish_server_opened(self, server_address: _Address, topology_id: ObjectId) -> None: """Publish a ServerOpeningEvent to all server listeners. - :Parameters: - - `server_address`: The address (host, port) pair of the server. - - `topology_id`: A unique identifier for the topology this server + :param server_address: The address (host, port) pair of the server. + :param topology_id: A unique identifier for the topology this server is a part of. """ event = ServerOpeningEvent(server_address, topology_id) @@ -1682,9 +1636,8 @@ def publish_server_opened(self, server_address: _Address, topology_id: ObjectId) def publish_server_closed(self, server_address: _Address, topology_id: ObjectId) -> None: """Publish a ServerClosedEvent to all server listeners. - :Parameters: - - `server_address`: The address (host, port) pair of the server. - - `topology_id`: A unique identifier for the topology this server + :param server_address: The address (host, port) pair of the server. + :param topology_id: A unique identifier for the topology this server is a part of. 
""" event = ServerClosedEvent(server_address, topology_id) @@ -1703,11 +1656,10 @@ def publish_server_description_changed( ) -> None: """Publish a ServerDescriptionChangedEvent to all server listeners. - :Parameters: - - `previous_description`: The previous server description. - - `server_address`: The address (host, port) pair of the server. - - `new_description`: The new server description. - - `topology_id`: A unique identifier for the topology this server + :param previous_description: The previous server description. + :param server_address: The address (host, port) pair of the server. + :param new_description: The new server description. + :param topology_id: A unique identifier for the topology this server is a part of. """ event = ServerDescriptionChangedEvent( @@ -1722,8 +1674,7 @@ def publish_server_description_changed( def publish_topology_opened(self, topology_id: ObjectId) -> None: """Publish a TopologyOpenedEvent to all topology listeners. - :Parameters: - - `topology_id`: A unique identifier for the topology this server + :param topology_id: A unique identifier for the topology this server is a part of. """ event = TopologyOpenedEvent(topology_id) @@ -1736,8 +1687,7 @@ def publish_topology_opened(self, topology_id: ObjectId) -> None: def publish_topology_closed(self, topology_id: ObjectId) -> None: """Publish a TopologyClosedEvent to all topology listeners. - :Parameters: - - `topology_id`: A unique identifier for the topology this server + :param topology_id: A unique identifier for the topology this server is a part of. """ event = TopologyClosedEvent(topology_id) @@ -1755,10 +1705,9 @@ def publish_topology_description_changed( ) -> None: """Publish a TopologyDescriptionChangedEvent to all topology listeners. - :Parameters: - - `previous_description`: The previous topology description. - - `new_description`: The new topology description. - - `topology_id`: A unique identifier for the topology this server + :param previous_description: The previous topology description. + :param new_description: The new topology description. + :param topology_id: A unique identifier for the topology this server is a part of. """ event = TopologyDescriptionChangedEvent(previous_description, new_description, topology_id) diff --git a/pymongo/network.py b/pymongo/network.py index fb4388121e..360d06eb78 100644 --- a/pymongo/network.py +++ b/pymongo/network.py @@ -86,31 +86,30 @@ def command( ) -> _DocumentType: """Execute a command over the socket, or raise socket.error. - :Parameters: - - `conn`: a Connection instance - - `dbname`: name of the database on which to run the command - - `spec`: a command document as an ordered dict type, eg SON. - - `is_mongos`: are we connected to a mongos? - - `read_preference`: a read preference - - `codec_options`: a CodecOptions instance - - `session`: optional ClientSession instance. - - `client`: optional MongoClient instance for updating $clusterTime. - - `check`: raise OperationFailure if there are errors - - `allowable_errors`: errors to ignore if `check` is True - - `address`: the (host, port) of `conn` - - `listeners`: An instance of :class:`~pymongo.monitoring.EventListeners` - - `max_bson_size`: The maximum encoded bson size for this server - - `read_concern`: The read concern for this command. - - `parse_write_concern_error`: Whether to parse the ``writeConcernError`` + :param conn: a Connection instance + :param dbname: name of the database on which to run the command + :param spec: a command document as an ordered dict type, eg SON. 
+ :param is_mongos: are we connected to a mongos? + :param read_preference: a read preference + :param codec_options: a CodecOptions instance + :param session: optional ClientSession instance. + :param client: optional MongoClient instance for updating $clusterTime. + :param check: raise OperationFailure if there are errors + :param allowable_errors: errors to ignore if `check` is True + :param address: the (host, port) of `conn` + :param listeners: An instance of :class:`~pymongo.monitoring.EventListeners` + :param max_bson_size: The maximum encoded bson size for this server + :param read_concern: The read concern for this command. + :param parse_write_concern_error: Whether to parse the ``writeConcernError`` field in the command response. - - `collation`: The collation for this command. - - `compression_ctx`: optional compression Context. - - `use_op_msg`: True if we should use OP_MSG. - - `unacknowledged`: True if this is an unacknowledged command. - - `user_fields` (optional): Response fields that should be decoded + :param collation: The collation for this command. + :param compression_ctx: optional compression Context. + :param use_op_msg: True if we should use OP_MSG. + :param unacknowledged: True if this is an unacknowledged command. + :param user_fields: Response fields that should be decoded using the TypeDecoders from codec_options, passed to bson._decode_all_selective. - - `exhaust_allowed`: True if we should enable OP_MSG exhaustAllowed. + :param exhaust_allowed: True if we should enable OP_MSG exhaustAllowed. """ name = next(iter(spec)) ns = dbname + ".$cmd" diff --git a/pymongo/operations.py b/pymongo/operations.py index d780120656..52bce1ad19 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -55,8 +55,7 @@ def __init__(self, document: _DocumentType) -> None: For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `document`: The document to insert. If the document is missing an + :param document: The document to insert. If the document is missing an _id field one will be added. """ self._doc = document @@ -92,11 +91,10 @@ def __init__( For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `filter`: A query that matches the document to delete. - - `collation` (optional): An instance of + :param filter: A query that matches the document to delete. + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. @@ -157,11 +155,10 @@ def __init__( For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `filter`: A query that matches the documents to delete. - - `collation` (optional): An instance of + :param filter: A query that matches the documents to delete. + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. @@ -224,14 +221,13 @@ def __init__( For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `filter`: A query that matches the document to replace. - - `replacement`: The new document. 
- - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the document to replace. + :param replacement: The new document. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. @@ -377,16 +373,15 @@ def __init__( For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `filter`: A query that matches the document to update. - - `update`: The modifications to apply. - - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the document to update. + :param update: The modifications to apply. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `array_filters` (optional): A list of filters specifying which + :param array_filters: A list of filters specifying which array elements an update should apply. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. @@ -435,16 +430,15 @@ def __init__( For use with :meth:`~pymongo.collection.Collection.bulk_write`. - :Parameters: - - `filter`: A query that matches the documents to update. - - `update`: The modifications to apply. - - `upsert` (optional): If ``True``, perform an insert if no documents + :param filter: A query that matches the documents to update. + :param update: The modifications to apply. + :param upsert: If ``True``, perform an insert if no documents match the filter. - - `collation` (optional): An instance of + :param collation: An instance of :class:`~pymongo.collation.Collation`. - - `array_filters` (optional): A list of filters specifying which + :param array_filters: A list of filters specifying which array elements an update should apply. - - `hint` (optional): An index to use to support the query + :param hint: An index to use to support the query predicate specified either by its string name, or in the same format as passed to :meth:`~pymongo.collection.Collection.create_index` (e.g. @@ -527,10 +521,9 @@ def __init__(self, keys: _IndexKeyHint, **kwargs: Any) -> None: See the MongoDB documentation for a full list of supported options by server version. - :Parameters: - - `keys`: a single key or a list containing (key, direction) pairs + :param keys: a single key or a list containing (key, direction) pairs or keys specifying the index to create. - - `**kwargs` (optional): any additional index creation + :param kwargs: any additional index creation options (see the above list) should be passed as keyword arguments. @@ -569,9 +562,8 @@ def __init__(self, definition: Mapping[str, Any], name: Optional[str] = None) -> For use with :meth:`~pymongo.collection.Collection.create_search_index` and :meth:`~pymongo.collection.Collection.create_search_indexes`. - :Parameters: - - `definition` - The definition for this index. - - `name` (optional) - The name for this index, if present. 
+ :param definition: The definition for this index. + :param name: The name for this index, if present. .. versionadded:: 4.5 diff --git a/pymongo/periodic_executor.py b/pymongo/periodic_executor.py index 003b05647c..25d519187f 100644 --- a/pymongo/periodic_executor.py +++ b/pymongo/periodic_executor.py @@ -36,12 +36,11 @@ def __init__( If the target's return value is false, the executor stops. - :Parameters: - - `interval`: Seconds between calls to `target`. - - `min_interval`: Minimum seconds between calls if `wake` is + :param interval: Seconds between calls to `target`. + :param min_interval: Minimum seconds between calls if `wake` is called very often. - - `target`: A function. - - `name`: A name to give the underlying thread. + :param target: A function. + :param name: A name to give the underlying thread. """ # threading.Event and its internal condition variable are expensive # in Python 2, see PYTHON-983. Use a boolean to know when to wake. diff --git a/pymongo/pool.py b/pymongo/pool.py index a9271490f0..bd911b0a94 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -714,11 +714,10 @@ def cancelled(self) -> bool: class Connection: """Store a connection with some metadata. - :Parameters: - - `conn`: a raw connection object - - `pool`: a Pool instance - - `address`: the server's (host, port) - - `id`: the id of this socket in it's pool + :param conn: a raw connection object + :param pool: a Pool instance + :param address: the server's (host, port) + :param id: the id of this socket in its pool """ def __init__( @@ -949,23 +948,22 @@ def command( ) -> dict[str, Any]: """Execute a command or raise an error. - :Parameters: - - `dbname`: name of the database on which to run the command - - `spec`: a command document as a dict, SON, or mapping object - - `read_preference`: a read preference - - `codec_options`: a CodecOptions instance - - `check`: raise OperationFailure if there are errors - - `allowable_errors`: errors to ignore if `check` is True - - `read_concern`: The read concern for this command. - - `write_concern`: The write concern for this command. - - `parse_write_concern_error`: Whether to parse the + :param dbname: name of the database on which to run the command + :param spec: a command document as a dict, SON, or mapping object + :param read_preference: a read preference + :param codec_options: a CodecOptions instance + :param check: raise OperationFailure if there are errors + :param allowable_errors: errors to ignore if `check` is True + :param read_concern: The read concern for this command. + :param write_concern: The write concern for this command. + :param parse_write_concern_error: Whether to parse the ``writeConcernError`` field in the command response. - - `collation`: The collation for this command. - - `session`: optional ClientSession instance. - - `client`: optional MongoClient for gossipping $clusterTime. - - `retryable_write`: True if this command is a retryable write. - - `publish_events`: Should we publish events for this command? - - `user_fields` (optional): Response fields that should be decoded + :param collation: The collation for this command. + :param session: optional ClientSession instance. + :param client: optional MongoClient for gossipping $clusterTime. + :param retryable_write: True if this command is a retryable write. + :param publish_events: Should we publish events for this command? + :param user_fields: Response fields that should be decoded using the TypeDecoders from codec_options, passed to bson._decode_all_selective.
""" @@ -1057,9 +1055,8 @@ def unack_write(self, msg: bytes, max_doc_size: int) -> None: Can raise ConnectionFailure or InvalidDocument. - :Parameters: - - `msg`: bytes, an OP_MSG message. - - `max_doc_size`: size in bytes of the largest document in `msg`. + :param msg: bytes, an OP_MSG message. + :param max_doc_size: size in bytes of the largest document in `msg`. """ self._raise_if_not_writable(True) self.send_message(msg, max_doc_size) @@ -1071,9 +1068,8 @@ def write_command( Can raise ConnectionFailure or OperationFailure. - :Parameters: - - `request_id`: an int. - - `msg`: bytes, the command message. + :param request_id: an int. + :param msg: bytes, the command message. """ self.send_message(msg, 0) reply = self.receive_message(request_id) @@ -1380,10 +1376,9 @@ class PoolState: class Pool: def __init__(self, address: _Address, options: PoolOptions, handshake: bool = True): """ - :Parameters: - - `address`: a (hostname, port) tuple - - `options`: a PoolOptions instance - - `handshake`: whether to call hello for each new Connection + :param address: a (hostname, port) tuple + :param options: a PoolOptions instance + :param handshake: whether to call hello for each new Connection """ if options.pause_enabled: self.state = PoolState.PAUSED @@ -1649,8 +1644,7 @@ def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterat Can raise ConnectionFailure or OperationFailure. - :Parameters: - - `handler` (optional): A _MongoClientErrorHandler. + :param handler: A _MongoClientErrorHandler. """ listeners = self.opts._event_listeners if self.enabled_for_cmap: @@ -1804,8 +1798,7 @@ def _get_conn(self, handler: Optional[_MongoClientErrorHandler] = None) -> Conne def checkin(self, conn: Connection) -> None: """Return the connection to the pool, or if it's closed discard it. - :Parameters: - - `conn`: The connection to check into the pool. + :param conn: The connection to check into the pool. """ txn = conn.pinned_txn cursor = conn.pinned_cursor diff --git a/pymongo/read_concern.py b/pymongo/read_concern.py index 0b54ee86f7..eda715f7c0 100644 --- a/pymongo/read_concern.py +++ b/pymongo/read_concern.py @@ -21,8 +21,7 @@ class ReadConcern: """ReadConcern - :Parameters: - - `level`: (string) The read concern level specifies the level of + :param level: (string) The read concern level specifies the level of isolation for read operations. For example, a read operation using a read concern level of ``majority`` will only return data that has been written to a majority of nodes. If the level is left unspecified, the diff --git a/pymongo/read_preferences.py b/pymongo/read_preferences.py index 986cc772bf..7752750c46 100644 --- a/pymongo/read_preferences.py +++ b/pymongo/read_preferences.py @@ -301,15 +301,14 @@ class PrimaryPreferred(_ServerMode): created reads will be routed to an available secondary until the primary of the replica set is discovered. - :Parameters: - - `tag_sets`: The :attr:`~tag_sets` to use if the primary is not + :param tag_sets: The :attr:`~tag_sets` to use if the primary is not available. - - `max_staleness`: (integer, in seconds) The maximum estimated + :param max_staleness: (integer, in seconds) The maximum estimated length of time a replica set secondary can fall behind the primary in replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - - `hedge`: The :attr:`~hedge` to use if the primary is not available. 
+ :param hedge: The :attr:`~hedge` to use if the primary is not available. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -345,14 +344,13 @@ class Secondary(_ServerMode): * When connected to a replica set queries are distributed among secondaries. An error is raised if no secondaries are available. - :Parameters: - - `tag_sets`: The :attr:`~tag_sets` for this read preference. - - `max_staleness`: (integer, in seconds) The maximum estimated + :param tag_sets: The :attr:`~tag_sets` for this read preference. + :param max_staleness: (integer, in seconds) The maximum estimated length of time a replica set secondary can fall behind the primary in replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - - `hedge`: The :attr:`~hedge` for this read preference. + :param hedge: The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -389,14 +387,13 @@ class SecondaryPreferred(_ServerMode): created reads will be routed to the primary of the replica set until an available secondary is discovered. - :Parameters: - - `tag_sets`: The :attr:`~tag_sets` for this read preference. - - `max_staleness`: (integer, in seconds) The maximum estimated + :param tag_sets: The :attr:`~tag_sets` for this read preference. + :param max_staleness: (integer, in seconds) The maximum estimated length of time a replica set secondary can fall behind the primary in replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - - `hedge`: The :attr:`~hedge` for this read preference. + :param hedge: The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -434,14 +431,13 @@ class Nearest(_ServerMode): * When connected to a replica set queries are distributed among all members. - :Parameters: - - `tag_sets`: The :attr:`~tag_sets` for this read preference. - - `max_staleness`: (integer, in seconds) The maximum estimated + :param tag_sets: The :attr:`~tag_sets` for this read preference. + :param max_staleness: (integer, in seconds) The maximum estimated length of time a replica set secondary can fall behind the primary in replication before it will no longer be selected for operations. Default -1, meaning no maximum. If it is set, it must be at least 90 seconds. - - `hedge`: The :attr:`~hedge` for this read preference. + :param hedge: The :attr:`~hedge` for this read preference. .. versionchanged:: 3.11 Added ``hedge`` parameter. @@ -471,8 +467,7 @@ class _AggWritePref: primary read preference. * Otherwise use `pref` read preference. - :Parameters: - - `pref`: The read preference to use on MongoDB 5.0+. + :param pref: The read preference to use on MongoDB 5.0+. """ __slots__ = ("pref", "effective_pref") diff --git a/pymongo/response.py b/pymongo/response.py index 5ff6ca707e..5cdd3e7e8d 100644 --- a/pymongo/response.py +++ b/pymongo/response.py @@ -39,12 +39,11 @@ def __init__( ): """Represent a response from the server. - :Parameters: - - `data`: A network response message. - - `address`: (host, port) of the source server. - - `request_id`: The request id of this operation. - - `duration`: The duration of the operation. - - `from_command`: if the response is the result of a db command. + :param data: A network response message. + :param address: (host, port) of the source server. + :param request_id: The request id of this operation. 
+ :param duration: The duration of the operation. + :param from_command: if the response is the result of a db command. """ self._data = data self._address = address @@ -100,15 +99,14 @@ def __init__( ): """Represent a response to an exhaust cursor's initial query. - :Parameters: - - `data`: A network response message. - - `address`: (host, port) of the source server. - - `conn`: The Connection used for the initial query. - - `request_id`: The request id of this operation. - - `duration`: The duration of the operation. - - `from_command`: If the response is the result of a db command. - - `docs`: List of documents. - - `more_to_come`: Bool indicating whether cursor is ready to be + :param data: A network response message. + :param address: (host, port) of the source server. + :param conn: The Connection used for the initial query. + :param request_id: The request id of this operation. + :param duration: The duration of the operation. + :param from_command: If the response is the result of a db command. + :param docs: List of documents. + :param more_to_come: Bool indicating whether cursor is ready to be exhausted. """ super().__init__(data, address, request_id, duration, from_command, docs) diff --git a/pymongo/results.py b/pymongo/results.py index 20c6023cd2..f57286569b 100644 --- a/pymongo/results.py +++ b/pymongo/results.py @@ -187,9 +187,8 @@ class BulkWriteResult(_WriteResult): def __init__(self, bulk_api_result: dict[str, Any], acknowledged: bool) -> None: """Create a BulkWriteResult instance. - :Parameters: - - `bulk_api_result`: A result dict from the bulk API - - `acknowledged`: Was this write result acknowledged? If ``False`` + :param bulk_api_result: A result dict from the bulk API + :param acknowledged: Was this write result acknowledged? If ``False`` then all properties of this object will raise :exc:`~pymongo.errors.InvalidOperation`. """ diff --git a/pymongo/saslprep.py b/pymongo/saslprep.py index c78ad0f0a9..7fb546f61b 100644 --- a/pymongo/saslprep.py +++ b/pymongo/saslprep.py @@ -57,18 +57,16 @@ def saslprep( def saslprep(data: Any, prohibit_unassigned_code_points: Optional[bool] = True) -> Any: """An implementation of RFC4013 SASLprep. - :Parameters: - - `data`: The string to SASLprep. Unicode strings + :param data: The string to SASLprep. Unicode strings (:class:`str`) are supported. Byte strings (:class:`bytes`) are ignored. - - `prohibit_unassigned_code_points`: True / False. RFC 3454 + :param prohibit_unassigned_code_points: True / False. RFC 3454 and RFCs for various SASL mechanisms distinguish between `queries` (unassigned code points allowed) and `stored strings` (unassigned code points prohibited). Defaults to ``True`` (unassigned code points are prohibited). - :Returns: - The SASLprep'ed version of `data`. + :return: The SASLprep'ed version of `data`. """ prohibited: Any diff --git a/pymongo/server.py b/pymongo/server.py index f431fd0140..5e1a4b3fab 100644 --- a/pymongo/server.py +++ b/pymongo/server.py @@ -109,12 +109,11 @@ def run_operation( cursors. Can raise ConnectionFailure, OperationFailure, etc. - :Parameters: - - `conn`: A Connection instance. - - `operation`: A _Query or _GetMore object. - - `read_preference`: The read preference to use. - - `listeners`: Instance of _EventListeners or None. - - `unpack_res`: A callable that decodes the wire protocol response. + :param conn: A Connection instance. + :param operation: A _Query or _GetMore object. + :param read_preference: The read preference to use. + :param listeners: Instance of _EventListeners or None. 
+ :param unpack_res: A callable that decodes the wire protocol response. """ duration = None assert listeners is not None @@ -274,8 +273,7 @@ def _split_message( ) -> tuple[int, Any, int]: """Return request_id, data, max_doc_size. - :Parameters: - - `message`: (request_id, data, max_doc_size) or (request_id, data) + :param message: (request_id, data, max_doc_size) or (request_id, data) """ if len(message) == 3: return message # type: ignore[return-value] diff --git a/pymongo/server_api.py b/pymongo/server_api.py index 90505bc5ae..4a746008c4 100644 --- a/pymongo/server_api.py +++ b/pymongo/server_api.py @@ -106,12 +106,11 @@ def __init__( ): """Options to configure MongoDB Stable API. - :Parameters: - - `version`: The API version string. Must be one of the values in + :param version: The API version string. Must be one of the values in :class:`ServerApiVersion`. - - `strict` (optional): Set to ``True`` to enable API strict mode. + :param strict: Set to ``True`` to enable API strict mode. Defaults to ``None`` which means "use the server's default". - - `deprecation_errors` (optional): Set to ``True`` to enable + :param deprecation_errors: Set to ``True`` to enable deprecation errors. Defaults to ``None`` which means "use the server's default". @@ -162,9 +161,8 @@ def deprecation_errors(self) -> Optional[bool]: def _add_to_command(cmd: MutableMapping[str, Any], server_api: Optional[ServerApi]) -> None: """Internal helper which adds API versioning options to a command. - :Parameters: - - `cmd`: The command. - - `server_api` (optional): A :class:`ServerApi` or ``None``. + :param cmd: The command. + :param server_api: A :class:`ServerApi` or ``None``. """ if not server_api: return diff --git a/pymongo/server_description.py b/pymongo/server_description.py index 3b4131f327..7943f4f5c8 100644 --- a/pymongo/server_description.py +++ b/pymongo/server_description.py @@ -29,12 +29,11 @@ class ServerDescription: """Immutable representation of one server. - :Parameters: - - `address`: A (host, port) pair - - `hello`: Optional Hello instance - - `round_trip_time`: Optional float - - `error`: Optional, the last error attempting to connect to the server - - `round_trip_time`: Optional float, the min latency from the most recent samples + :param address: A (host, port) pair + :param hello: Optional Hello instance + :param round_trip_time: Optional float + :param error: Optional, the last error attempting to connect to the server + :param round_trip_time: Optional float, the min latency from the most recent samples """ __slots__ = ( diff --git a/pymongo/topology.py b/pymongo/topology.py index 092c7d92af..81316e3e26 100644 --- a/pymongo/topology.py +++ b/pymongo/topology.py @@ -215,13 +215,12 @@ def select_servers( ) -> list[Server]: """Return a list of Servers matching selector, or time out. - :Parameters: - - `selector`: function that takes a list of Servers and returns + :param selector: function that takes a list of Servers and returns a subset of them. - - `server_selection_timeout` (optional): maximum seconds to wait. + :param server_selection_timeout: maximum seconds to wait. If not provided, the default value common.SERVER_SELECTION_TIMEOUT is used. - - `address`: optional server address to select. + :param address: optional server address to select. Calls self.open() if needed. @@ -318,9 +317,8 @@ def select_server_by_address( servers. Time out after "server_selection_timeout" if the server cannot be reached. - :Parameters: - - `address`: A (host, port) pair. 
- - `server_selection_timeout` (optional): maximum seconds to wait. + :param address: A (host, port) pair. + :param server_selection_timeout: maximum seconds to wait. If not provided, the default value common.SERVER_SELECTION_TIMEOUT is used. diff --git a/pymongo/topology_description.py b/pymongo/topology_description.py index e51378a022..99243d7ce2 100644 --- a/pymongo/topology_description.py +++ b/pymongo/topology_description.py @@ -69,14 +69,13 @@ def __init__( ) -> None: """Representation of a deployment of MongoDB servers. - :Parameters: - - `topology_type`: initial type - - `server_descriptions`: dict of (address, ServerDescription) for + :param topology_type: initial type + :param server_descriptions: dict of (address, ServerDescription) for all seeds - - `replica_set_name`: replica set name or None - - `max_set_version`: greatest setVersion seen from a primary, or None - - `max_election_id`: greatest electionId seen from a primary, or None - - `topology_settings`: a TopologySettings + :param replica_set_name: replica set name or None + :param max_set_version: greatest setVersion seen from a primary, or None + :param max_election_id: greatest electionId seen from a primary, or None + :param topology_settings: a TopologySettings """ self._topology_type = topology_type self._replica_set_name = replica_set_name @@ -283,12 +282,11 @@ def apply_selector( ) -> list[ServerDescription]: """List of servers matching the provided selector(s). - :Parameters: - - `selector`: a callable that takes a Selection as input and returns + :param selector: a callable that takes a Selection as input and returns a Selection as output. For example, an instance of a read preference from :mod:`~pymongo.read_preferences`. - - `address` (optional): A server address to select. - - `custom_selector` (optional): A callable that augments server + :param address: A server address to select. + :param custom_selector: A callable that augments server selection rules. Accepts a list of :class:`~pymongo.server_description.ServerDescription` objects and return a list of server descriptions that should be considered @@ -333,8 +331,7 @@ def has_readable_server(self, read_preference: _ServerMode = ReadPreference.PRIM """Does this topology have any readable servers available matching the given read preference? - :Parameters: - - `read_preference`: an instance of a read preference from + :param read_preference: an instance of a read preference from :mod:`~pymongo.read_preferences`. Defaults to :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. @@ -384,9 +381,8 @@ def updated_topology_description( ) -> TopologyDescription: """Return an updated copy of a TopologyDescription. - :Parameters: - - `topology_description`: the current TopologyDescription - - `server_description`: a new ServerDescription that resulted from + :param topology_description: the current TopologyDescription + :param server_description: a new ServerDescription that resulted from a hello call Called after attempting (successfully or not) to call hello on the @@ -489,9 +485,8 @@ def _updated_topology_description_srv_polling( ) -> TopologyDescription: """Return an updated copy of a TopologyDescription. 
- :Parameters: - - `topology_description`: the current TopologyDescription - - `seedlist`: a list of new seeds new ServerDescription that resulted from + :param topology_description: the current TopologyDescription + :param seedlist: a list of new seeds new ServerDescription that resulted from a hello call """ assert topology_description.topology_type in SRV_POLLING_TOPOLOGIES diff --git a/pymongo/uri_parser.py b/pymongo/uri_parser.py index d5292c1b54..7f4ef57f9c 100644 --- a/pymongo/uri_parser.py +++ b/pymongo/uri_parser.py @@ -56,8 +56,7 @@ def _unquoted_percent(s: str) -> bool: """Check for unescaped percent signs. - :Parameters: - - `s`: A string. `s` can have things like '%25', '%2525', + :param s: A string. `s` can have things like '%25', '%2525', and '%E2%85%A8' but cannot have unquoted percent like '%foo'. """ for i in range(len(s)): @@ -78,8 +77,7 @@ def parse_userinfo(userinfo: str) -> tuple[str, str]: Returns a 2-tuple containing the unescaped username followed by the unescaped password. - :Parameters: - - `userinfo`: A string of the form : + :param userinfo: A string of the form : """ if "@" in userinfo or userinfo.count(":") > 1 or _unquoted_percent(userinfo): raise InvalidURI( @@ -103,10 +101,9 @@ def parse_ipv6_literal_host( Returns a 2-tuple of IPv6 literal followed by port where port is default_port if it wasn't specified in entity. - :Parameters: - - `entity`: A string that represents an IPv6 literal enclosed + :param entity: A string that represents an IPv6 literal enclosed in braces (e.g. '[::1]' or '[::1]:27017'). - - `default_port`: The port number to use when one wasn't + :param default_port: The port number to use when one wasn't specified in entity. """ if entity.find("]") == -1: @@ -125,10 +122,9 @@ def parse_host(entity: str, default_port: Optional[int] = DEFAULT_PORT) -> _Addr Returns a 2-tuple of host followed by port where port is default_port if it wasn't specified in the string. - :Parameters: - - `entity`: A host or host:port string where host could be a + :param entity: A host or host:port string where host could be a hostname or IP address. - - `default_port`: The port number to use when one wasn't + :param default_port: The port number to use when one wasn't specified in entity. """ host = entity @@ -192,8 +188,7 @@ def _handle_security_options(options: _CaseInsensitiveDictionary) -> _CaseInsens """Raise appropriate errors when conflicting TLS options are present in the options dictionary. - :Parameters: - - `options`: Instance of _CaseInsensitiveDictionary containing + :param options: Instance of _CaseInsensitiveDictionary containing MongoDB URI options. """ # Implicitly defined options must not be explicitly specified. @@ -247,8 +242,7 @@ def _handle_option_deprecations(options: _CaseInsensitiveDictionary) -> _CaseIns options dictionary. Removes deprecated option key, value pairs if the options dictionary is found to also have the renamed option. - :Parameters: - - `options`: Instance of _CaseInsensitiveDictionary containing + :param options: Instance of _CaseInsensitiveDictionary containing MongoDB URI options. """ for optname in list(options): @@ -286,8 +280,7 @@ def _normalize_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveD """Normalizes option names in the options dictionary by converting them to their internally-used names. - :Parameters: - - `options`: Instance of _CaseInsensitiveDictionary containing + :param options: Instance of _CaseInsensitiveDictionary containing MongoDB URI options. """ # Expand the tlsInsecure option. 
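For reference, a minimal sketch of how the host-parsing helpers documented above behave; this assumes the default port of 27017, and the printed results are expected values for illustration rather than guarantees:

    from pymongo.uri_parser import parse_host, split_hosts

    # A bare hostname falls back to the default port.
    print(parse_host("example.com"))         # ('example.com', 27017)

    # An IPv6 literal must be enclosed in brackets; the port is optional.
    print(parse_host("[::1]:27018"))         # ('::1', 27018)

    # split_hosts applies the same parsing to a comma-separated host list.
    print(split_hosts("host1:27017,host2"))  # [('host1', 27017), ('host2', 27017)]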
@@ -312,9 +305,8 @@ def validate_options(opts: Mapping[str, Any], warn: bool = False) -> MutableMapp False then errors will be thrown for invalid options, otherwise they will be ignored and a warning will be issued. - :Parameters: - - `opts`: A dict of MongoDB URI options. - - `warn` (optional): If ``True`` then warnings will be logged and + :param opts: A dict of MongoDB URI options. + :param warn: If ``True`` then warnings will be logged and invalid options will be ignored. Otherwise invalid options will cause errors. """ @@ -327,13 +319,12 @@ def split_options( """Takes the options portion of a MongoDB URI, validates each option and returns the options in a dictionary. - :Parameters: - - `opt`: A string representing MongoDB URI options. - - `validate`: If ``True`` (the default), validate and normalize all + :param opt: A string representing MongoDB URI options. + :param validate: If ``True`` (the default), validate and normalize all options. - - `warn`: If ``False`` (the default), suppress all warnings raised + :param warn: If ``False`` (the default), suppress all warnings raised during validation of options. - - `normalize`: If ``True`` (the default), renames all options to their + :param normalize: If ``True`` (the default), renames all options to their internally-used names. """ and_idx = opts.find("&") @@ -375,9 +366,8 @@ def split_hosts(hosts: str, default_port: Optional[int] = DEFAULT_PORT) -> list[ Returns a set of 2-tuples containing the host name (or IP) followed by port number. - :Parameters: - - `hosts`: A string of the form host1[:port],host2[:port],... - - `default_port`: The port number to use when one wasn't specified + :param hosts: A string of the form host1[:port],host2[:port],... + :param default_port: The port number to use when one wasn't specified for a host. """ nodes = [] @@ -442,21 +432,20 @@ def parse_uri( If the URI scheme is "mongodb+srv://" DNS SRV and TXT lookups will be done to build nodelist and options. - :Parameters: - - `uri`: The MongoDB URI to parse. - - `default_port`: The port number to use when one wasn't specified + :param uri: The MongoDB URI to parse. + :param default_port: The port number to use when one wasn't specified for a host in the URI. - - `validate` (optional): If ``True`` (the default), validate and + :param validate: If ``True`` (the default), validate and normalize all options. Default: ``True``. - - `warn` (optional): When validating, if ``True`` then will warn + :param warn: When validating, if ``True`` then will warn the user then ignore any invalid options or values. If ``False``, validation will error when options are unsupported or values are invalid. Default: ``False``. - - `normalize` (optional): If ``True``, convert names of URI options + :param normalize: If ``True``, convert names of URI options to their internally-used names. Default: ``True``. - - `connect_timeout` (optional): The maximum time in milliseconds to + :param connect_timeout: The maximum time in milliseconds to wait for a response from the DNS server. - - `srv_service_name` (optional): A custom SRV service name + :param srv_service_name: A custom SRV service name .. versionchanged:: 4.6 The delimiting slash (``/``) between hosts and connection options is now optional. 
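A short, hedged sketch of the ``parse_uri`` call documented above; the return value is assumed here to be a dict with keys such as ``nodelist``, ``database`` and ``options``, and the commented results are illustrative:

    from pymongo.uri_parser import parse_uri

    parsed = parse_uri("mongodb://user:pass@localhost:27017/mydb?maxPoolSize=50")

    print(parsed["nodelist"])   # [('localhost', 27017)]
    print(parsed["database"])   # 'mydb'
    print(parsed["options"])    # case-insensitive options dict, e.g. {'maxpoolsize': 50}

No DNS lookups are expected for a plain ``mongodb://`` scheme; SRV and TXT resolution only applies to ``mongodb+srv://`` URIs.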
diff --git a/pymongo/write_concern.py b/pymongo/write_concern.py index 893d05b688..2fa9532a3e 100644 --- a/pymongo/write_concern.py +++ b/pymongo/write_concern.py @@ -31,23 +31,22 @@ def validate_boolean(option: str, value: Any) -> bool: class WriteConcern: """WriteConcern - :Parameters: - - `w`: (integer or string) Used with replication, write operations + :param w: (integer or string) Used with replication, write operations will block until they have been replicated to the specified number or tagged set of servers. `w=` always includes the replica set primary (e.g. w=3 means write to the primary and wait until replicated to **two** secondaries). **w=0 disables acknowledgement of write operations and can not be used with other write concern options.** - - `wtimeout`: (integer) Used in conjunction with `w`. Specify a value + :param wtimeout: (integer) Used in conjunction with `w`. Specify a value in milliseconds to control how long to wait for write propagation to complete. If replication does not complete in the given timeframe, a timeout exception is raised. - - `j`: If ``True`` block until write operations have been committed + :param j: If ``True`` block until write operations have been committed to the journal. Cannot be used in combination with `fsync`. Write operations will fail with an exception if this option is used when the server is running without journaling. - - `fsync`: If ``True`` and the server is running without journaling, + :param fsync: If ``True`` and the server is running without journaling, blocks until the server has synced all data files to disk. If the server is running with journaling, this acts the same as the `j` option, blocking until write operations have been committed to the diff --git a/test/test_comment.py b/test/test_comment.py index 047bc3f550..ffbf8d51ca 100644 --- a/test/test_comment.py +++ b/test/test_comment.py @@ -89,7 +89,7 @@ def _test_ops( self.assertTrue(tested) if h not in [coll.aggregate_raw_batches]: self.assertIn( - "`comment` (optional):", + ":param comment:", h.__doc__, ) if h not in already_supported: From d4dfd4a044b8dbf2d364a884aeb219ca63f897c7 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Thu, 30 Nov 2023 14:21:10 -0500 Subject: [PATCH 020/221] PYTHON-3036 Improve error message for unknown MongoClient options (#1440) --- doc/contributors.rst | 1 + pymongo/common.py | 30 ++++++++++++++++++++++-------- test/test_client.py | 9 +++++++++ test/test_uri_parser.py | 6 ++++++ 4 files changed, 38 insertions(+), 8 deletions(-) diff --git a/doc/contributors.rst b/doc/contributors.rst index 2a4ca1ea47..b6e143440b 100644 --- a/doc/contributors.rst +++ b/doc/contributors.rst @@ -98,3 +98,4 @@ The following is a list of people who have contributed to - Dainis Gorbunovs (DainisGorbunovs) - Iris Ho (sleepyStick) - Stephan Hof (stephan-hof) +- Casey Clements (caseyclements) diff --git a/pymongo/common.py b/pymongo/common.py index bda294af93..41d1e1050e 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -20,6 +20,7 @@ import inspect import warnings from collections import OrderedDict, abc +from difflib import get_close_matches from typing import ( TYPE_CHECKING, Any, @@ -162,9 +163,12 @@ def clean_node(node: str) -> tuple[str, int]: return host.lower(), port -def raise_config_error(key: str, dummy: Any) -> NoReturn: +def raise_config_error(key: str, suggestions: Optional[list] = None) -> NoReturn: """Raise ConfigurationError with the given key name.""" - raise ConfigurationError(f"Unknown option {key}") + msg = f"Unknown option: {key}." 
+ if suggestions: + msg += f" Did you mean one of ({', '.join(suggestions)}) or maybe a camelCase version of one? Refer to docstring." + raise ConfigurationError(msg) # Mapping of URI uuid representation options to valid subtypes. @@ -810,14 +814,24 @@ def validate_auth_option(option: str, value: Any) -> tuple[str, Any]: """Validate optional authentication parameters.""" lower, value = validate(option, value) if lower not in _AUTH_OPTIONS: - raise ConfigurationError(f"Unknown authentication option: {option}") + raise ConfigurationError(f"Unknown option: {option}. Must be in {_AUTH_OPTIONS}") return option, value +def _get_validator( + key: str, validators: dict[str, Callable[[Any, Any], Any]], normed_key: Optional[str] = None +) -> Callable: + normed_key = normed_key or key + try: + return validators[normed_key] + except KeyError: + suggestions = get_close_matches(normed_key, validators, cutoff=0.2) + raise_config_error(key, suggestions) + + def validate(option: str, value: Any) -> tuple[str, Any]: """Generic validation function.""" - lower = option.lower() - validator = VALIDATORS.get(lower, raise_config_error) + validator = _get_validator(option, VALIDATORS, normed_key=option.lower()) value = validator(option, value) return option, value @@ -855,15 +869,15 @@ def get_setter_key(x: str) -> str: for opt, value in options.items(): normed_key = get_normed_key(opt) try: - validator = URI_OPTIONS_VALIDATOR_MAP.get(normed_key, raise_config_error) - value = validator(opt, value) # noqa: PLW2901 + validator = _get_validator(opt, URI_OPTIONS_VALIDATOR_MAP, normed_key=normed_key) + validated = validator(opt, value) except (ValueError, TypeError, ConfigurationError) as exc: if warn: warnings.warn(str(exc), stacklevel=2) else: raise else: - validated_options[get_setter_key(normed_key)] = value + validated_options[get_setter_key(normed_key)] = validated return validated_options diff --git a/test/test_client.py b/test/test_client.py index aceb153120..089b1673b5 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -21,6 +21,7 @@ import datetime import gc import os +import re import signal import socket import struct @@ -535,6 +536,14 @@ def test_client_options(self): self.assertIsInstance(c.options.retry_writes, bool) self.assertIsInstance(c.options.retry_reads, bool) + def test_validate_suggestion(self): + """Validate kwargs in constructor.""" + for typo in ["auth", "Auth", "AUTH"]: + expected = f"Unknown option: {typo}. Did you mean one of (authsource, authmechanism, authoidcallowedhosts) or maybe a camelCase version of one? Refer to docstring." + expected = re.escape(expected) + with self.assertRaisesRegex(ConfigurationError, expected): + MongoClient(**{typo: "standard"}) # type: ignore[arg-type] + class TestClient(IntegrationTest): def test_multiple_uris(self): diff --git a/test/test_uri_parser.py b/test/test_uri_parser.py index d5a25f5900..a4ad908e10 100644 --- a/test/test_uri_parser.py +++ b/test/test_uri_parser.py @@ -144,6 +144,12 @@ def test_split_options(self): self.assertEqual({"authsource": "foobar"}, split_options("authSource=foobar")) self.assertEqual({"maxpoolsize": 50}, split_options("maxpoolsize=50")) + # Test suggestions given when invalid kwarg passed + + expected = r"Unknown option: auth. Did you mean one of \(authsource, authmechanism, timeoutms\) or maybe a camelCase version of one\? Refer to docstring." 
+ with self.assertRaisesRegex(ConfigurationError, expected): + split_options("auth=GSSAPI") + def test_parse_uri(self): self.assertRaises(InvalidURI, parse_uri, "http://foobar.com") self.assertRaises(InvalidURI, parse_uri, "http://foo@foobar.com") From 2dd33a2c150232b1d363d1000b0cf4b17bd6d889 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Thu, 30 Nov 2023 20:39:40 -0500 Subject: [PATCH 021/221] PYTHON-4055 Add xunit-results to .gitignore (#1445) --- .gitignore | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 77483d26b2..b0f418aa93 100644 --- a/.gitignore +++ b/.gitignore @@ -9,12 +9,14 @@ build/ doc/_build/ dist/ tools/settings.py +drivers-evergreen-tools pymongo.egg-info/ *.so -*.egg +*.egg* .tox mongocryptd.pid .idea/ +.vscode/ .nova/ venv/ secrets-export.sh @@ -25,3 +27,7 @@ test/lambda/env.json test/lambda/mongodb/pymongo/* test/lambda/mongodb/gridfs/* test/lambda/mongodb/bson/* + +# test results and logs +xunit-results/ +server.log From fa25311726f95da1aa1c1cdd9179f2ad615a93bd Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 1 Dec 2023 15:29:44 -0600 Subject: [PATCH 022/221] PYTHON-4068 Fix AWS ECS Task (#1444) --- .evergreen/config.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 46adaf4e74..4d7e70f55c 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -655,7 +655,14 @@ functions: exit 0 fi ${PREPARE_SHELL} - .evergreen/run-mongodb-aws-test.sh session-creds + set -ex + cd ${DRIVERS_TOOLS}/.evergreen/auth_aws + . ./activate-authawsvenv.sh + . aws_setup.sh ecs + export MONGODB_BINARIES="${MONGODB_BINARIES}"; + export PROJECT_DIRECTORY="${PROJECT_DIRECTORY}"; + python aws_tester.py ecs + cd - "cleanup": - command: shell.exec From b1939e147073ad601666820a1b94af45949a8a27 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 1 Dec 2023 14:33:37 -0800 Subject: [PATCH 023/221] PYTHON-2822 Add server connectionId to command monitoring events (#1438) --- doc/changelog.rst | 10 ++ pymongo/hello.py | 4 + pymongo/message.py | 3 + pymongo/monitoring.py | 50 ++++++++- pymongo/network.py | 9 +- pymongo/pool.py | 2 + pymongo/server.py | 9 +- .../pre-42-server-connection-id.json | 101 ++++++++++++++++++ .../server-connection-id.json | 101 ++++++++++++++++++ test/test_monitoring.py | 9 +- test/unified_format.py | 11 ++ 11 files changed, 298 insertions(+), 11 deletions(-) create mode 100644 test/command_monitoring/pre-42-server-connection-id.json create mode 100644 test/command_monitoring/server-connection-id.json diff --git a/doc/changelog.rst b/doc/changelog.rst index 4a2a634f41..898ab51874 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,6 +1,16 @@ Changelog ========= +Changes in Version 4.7 +------------------------ + +PyMongo 4.7 brings a number of improvements including: + +- Added the :class:`~pymongo.hello.Hello.server_connection_id`, + :attr:`pymongo.monitoring.CommandStartedEvent.server_connection_id`, + :attr:`pymongo.monitoring.CommandSucceededEvent.server_connection_id`, and + :attr:`pymongo.monitoring.CommandFailedEvent.server_connection_id` properties. 
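A minimal sketch of how the new ``server_connection_id`` attribute might be consumed from a command listener; the ``ConnectionIdLogger`` name is illustrative, and the attribute is assumed to be ``None`` when the server does not report a connection id (older servers):

    from pymongo import MongoClient, monitoring

    class ConnectionIdLogger(monitoring.CommandListener):
        def started(self, event):
            # May be None when the server does not report a connection id.
            print(event.command_name, "started; server_connection_id =",
                  event.server_connection_id)

        def succeeded(self, event):
            print(event.command_name, "succeeded; server_connection_id =",
                  event.server_connection_id)

        def failed(self, event):
            print(event.command_name, "failed; server_connection_id =",
                  event.server_connection_id)

    client = MongoClient(event_listeners=[ConnectionIdLogger()])
    client.admin.command("ping")

Listeners can also be registered globally with ``monitoring.register()`` instead of per-client.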
+ Changes in Version 4.6.1 ------------------------ diff --git a/pymongo/hello.py b/pymongo/hello.py index d38c285ab7..0f6d7a399a 100644 --- a/pymongo/hello.py +++ b/pymongo/hello.py @@ -218,3 +218,7 @@ def service_id(self) -> Optional[ObjectId]: @property def hello_ok(self) -> bool: return self._doc.get("helloOk", False) + + @property + def connection_id(self) -> Optional[int]: + return self._doc.get("connectionId") diff --git a/pymongo/message.py b/pymongo/message.py index b59b88ab29..7b93015a4c 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -1082,6 +1082,7 @@ def _start( self.db_name, request_id, self.conn.address, + self.conn.server_connection_id, self.op_id, self.conn.service_id, ) @@ -1095,6 +1096,7 @@ def _succeed(self, request_id: int, reply: _DocumentOut, duration: timedelta) -> self.name, request_id, self.conn.address, + self.conn.server_connection_id, self.op_id, self.conn.service_id, database_name=self.db_name, @@ -1108,6 +1110,7 @@ def _fail(self, request_id: int, failure: _DocumentOut, duration: timedelta) -> self.name, request_id, self.conn.address, + self.conn.server_connection_id, self.op_id, self.conn.service_id, database_name=self.db_name, diff --git a/pymongo/monitoring.py b/pymongo/monitoring.py index 7bd512dada..164b53ed2d 100644 --- a/pymongo/monitoring.py +++ b/pymongo/monitoring.py @@ -537,7 +537,15 @@ def _is_speculative_authenticate(command_name: str, doc: Mapping[str, Any]) -> b class _CommandEvent: """Base class for command events.""" - __slots__ = ("__cmd_name", "__rqst_id", "__conn_id", "__op_id", "__service_id", "__db") + __slots__ = ( + "__cmd_name", + "__rqst_id", + "__conn_id", + "__op_id", + "__service_id", + "__db", + "__server_conn_id", + ) def __init__( self, @@ -547,6 +555,7 @@ def __init__( operation_id: Optional[int], service_id: Optional[ObjectId] = None, database_name: str = "", + server_connection_id: Optional[int] = None, ) -> None: self.__cmd_name = command_name self.__rqst_id = request_id @@ -554,6 +563,7 @@ def __init__( self.__op_id = operation_id self.__service_id = service_id self.__db = database_name + self.__server_conn_id = server_connection_id @property def command_name(self) -> str: @@ -591,6 +601,14 @@ def database_name(self) -> str: """ return self.__db + @property + def server_connection_id(self) -> Optional[int]: + """The server-side connection id for the connection this command was sent on, or ``None``. + + .. versionadded:: 4.7 + """ + return self.__server_conn_id + class CommandStartedEvent(_CommandEvent): """Event published when a command starts. 
@@ -614,6 +632,7 @@ def __init__( connection_id: _Address, operation_id: Optional[int], service_id: Optional[ObjectId] = None, + server_connection_id: Optional[int] = None, ) -> None: if not command: raise ValueError(f"{command!r} is not a valid command") @@ -626,6 +645,7 @@ def __init__( operation_id, service_id=service_id, database_name=database_name, + server_connection_id=server_connection_id, ) cmd_name = command_name.lower() if cmd_name in _SENSITIVE_COMMANDS or _is_speculative_authenticate(cmd_name, command): @@ -644,13 +664,16 @@ def database_name(self) -> str: return super().database_name def __repr__(self) -> str: - return ("<{} {} db: {!r}, command: {!r}, operation_id: {}, service_id: {}>").format( + return ( + "<{} {} db: {!r}, command: {!r}, operation_id: {}, service_id: {}, server_connection_id: {}>" + ).format( self.__class__.__name__, self.connection_id, self.database_name, self.command_name, self.operation_id, self.service_id, + self.server_connection_id, ) @@ -680,6 +703,7 @@ def __init__( operation_id: Optional[int], service_id: Optional[ObjectId] = None, database_name: str = "", + server_connection_id: Optional[int] = None, ) -> None: super().__init__( command_name, @@ -688,6 +712,7 @@ def __init__( operation_id, service_id=service_id, database_name=database_name, + server_connection_id=server_connection_id, ) self.__duration_micros = _to_micros(duration) cmd_name = command_name.lower() @@ -708,7 +733,7 @@ def reply(self) -> _DocumentOut: def __repr__(self) -> str: return ( - "<{} {} db: {!r}, command: {!r}, operation_id: {}, duration_micros: {}, service_id: {}>" + "<{} {} db: {!r}, command: {!r}, operation_id: {}, duration_micros: {}, service_id: {}, server_connection_id: {}>" ).format( self.__class__.__name__, self.connection_id, @@ -717,6 +742,7 @@ def __repr__(self) -> str: self.operation_id, self.duration_micros, self.service_id, + self.server_connection_id, ) @@ -746,6 +772,7 @@ def __init__( operation_id: Optional[int], service_id: Optional[ObjectId] = None, database_name: str = "", + server_connection_id: Optional[int] = None, ) -> None: super().__init__( command_name, @@ -754,6 +781,7 @@ def __init__( operation_id, service_id=service_id, database_name=database_name, + server_connection_id=server_connection_id, ) self.__duration_micros = _to_micros(duration) self.__failure = failure @@ -771,7 +799,7 @@ def failure(self) -> _DocumentOut: def __repr__(self) -> str: return ( "<{} {} db: {!r}, command: {!r}, operation_id: {}, duration_micros: {}, " - "failure: {!r}, service_id: {}>" + "failure: {!r}, service_id: {}, server_connection_id: {}>" ).format( self.__class__.__name__, self.connection_id, @@ -781,6 +809,7 @@ def __repr__(self) -> str: self.duration_micros, self.failure, self.service_id, + self.server_connection_id, ) @@ -1453,6 +1482,7 @@ def publish_command_start( database_name: str, request_id: int, connection_id: _Address, + server_connection_id: Optional[int], op_id: Optional[int] = None, service_id: Optional[ObjectId] = None, ) -> None: @@ -1470,7 +1500,13 @@ def publish_command_start( if op_id is None: op_id = request_id event = CommandStartedEvent( - command, database_name, request_id, connection_id, op_id, service_id=service_id + command, + database_name, + request_id, + connection_id, + op_id, + service_id=service_id, + server_connection_id=server_connection_id, ) for subscriber in self.__command_listeners: try: @@ -1485,6 +1521,7 @@ def publish_command_success( command_name: str, request_id: int, connection_id: _Address, + server_connection_id: 
Optional[int], op_id: Optional[int] = None, service_id: Optional[ObjectId] = None, speculative_hello: bool = False, @@ -1518,6 +1555,7 @@ def publish_command_success( op_id, service_id, database_name=database_name, + server_connection_id=server_connection_id, ) for subscriber in self.__command_listeners: try: @@ -1532,6 +1570,7 @@ def publish_command_failure( command_name: str, request_id: int, connection_id: _Address, + server_connection_id: Optional[int], op_id: Optional[int] = None, service_id: Optional[ObjectId] = None, database_name: str = "", @@ -1560,6 +1599,7 @@ def publish_command_failure( op_id, service_id=service_id, database_name=database_name, + server_connection_id=server_connection_id, ) for subscriber in self.__command_listeners: try: diff --git a/pymongo/network.py b/pymongo/network.py index 360d06eb78..a6a308c3d7 100644 --- a/pymongo/network.py +++ b/pymongo/network.py @@ -167,7 +167,12 @@ def command( assert listeners is not None assert address is not None listeners.publish_command_start( - orig, dbname, request_id, address, service_id=conn.service_id + orig, + dbname, + request_id, + address, + conn.server_connection_id, + service_id=conn.service_id, ) start = datetime.datetime.now() @@ -209,6 +214,7 @@ def command( name, request_id, address, + conn.server_connection_id, service_id=conn.service_id, database_name=dbname, ) @@ -223,6 +229,7 @@ def command( name, request_id, address, + conn.server_connection_id, service_id=conn.service_id, speculative_hello=speculative_hello, database_name=dbname, diff --git a/pymongo/pool.py b/pymongo/pool.py index bd911b0a94..e7b5a0dd42 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -762,6 +762,7 @@ def __init__( self.more_to_come: bool = False # For load balancer support. self.service_id: Optional[ObjectId] = None + self.server_connection_id: Optional[int] = None # When executing a transaction in load balancing mode, this flag is # set to true to indicate that the session now owns the connection. 
self.pinned_txn = False @@ -902,6 +903,7 @@ def _hello( self.compression_context = ctx self.op_msg_enabled = True + self.server_connection_id = hello.connection_id if creds: self.negotiated_mechs = hello.sasl_supported_mechs if auth_ctx: diff --git a/pymongo/server.py b/pymongo/server.py index 5e1a4b3fab..cd7a013321 100644 --- a/pymongo/server.py +++ b/pymongo/server.py @@ -135,7 +135,12 @@ def run_operation( cmd["$db"] = dbn assert listeners is not None listeners.publish_command_start( - cmd, dbn, request_id, conn.address, service_id=conn.service_id + cmd, + dbn, + request_id, + conn.address, + conn.server_connection_id, + service_id=conn.service_id, ) start = datetime.now() @@ -178,6 +183,7 @@ def run_operation( operation.name, request_id, conn.address, + conn.server_connection_id, service_id=conn.service_id, database_name=dbn, ) @@ -204,6 +210,7 @@ def run_operation( operation.name, request_id, conn.address, + conn.server_connection_id, service_id=conn.service_id, database_name=dbn, ) diff --git a/test/command_monitoring/pre-42-server-connection-id.json b/test/command_monitoring/pre-42-server-connection-id.json new file mode 100644 index 0000000000..141fbe584f --- /dev/null +++ b/test/command_monitoring/pre-42-server-connection-id.json @@ -0,0 +1,101 @@ +{ + "description": "pre-42-server-connection-id", + "schemaVersion": "1.6", + "runOnRequirements": [ + { + "maxServerVersion": "4.0.99" + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent", + "commandFailedEvent" + ] + } + }, + { + "database": { + "id": "database", + "client": "client", + "databaseName": "server-connection-id-tests" + } + }, + { + "collection": { + "id": "collection", + "database": "database", + "collectionName": "coll" + } + } + ], + "initialData": [ + { + "databaseName": "server-connection-id-tests", + "collectionName": "coll", + "documents": [] + } + ], + "tests": [ + { + "description": "command events do not include server connection id", + "operations": [ + { + "name": "insertOne", + "object": "collection", + "arguments": { + "document": { + "x": 1 + } + } + }, + { + "name": "find", + "object": "collection", + "arguments": { + "filter": { + "$or": true + } + }, + "expectError": { + "isError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "hasServerConnectionId": false + } + }, + { + "commandSucceededEvent": { + "commandName": "insert", + "hasServerConnectionId": false + } + }, + { + "commandStartedEvent": { + "commandName": "find", + "hasServerConnectionId": false + } + }, + { + "commandFailedEvent": { + "commandName": "find", + "hasServerConnectionId": false + } + } + ] + } + ] + } + ] +} diff --git a/test/command_monitoring/server-connection-id.json b/test/command_monitoring/server-connection-id.json new file mode 100644 index 0000000000..a8f27637fc --- /dev/null +++ b/test/command_monitoring/server-connection-id.json @@ -0,0 +1,101 @@ +{ + "description": "server-connection-id", + "schemaVersion": "1.6", + "runOnRequirements": [ + { + "minServerVersion": "4.2" + } + ], + "createEntities": [ + { + "client": { + "id": "client", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent", + "commandFailedEvent" + ] + } + }, + { + "database": { + "id": "database", + "client": "client", + "databaseName": "server-connection-id-tests" + } + }, + { + "collection": { + "id": "collection", + "database": "database", + "collectionName": 
"coll" + } + } + ], + "initialData": [ + { + "databaseName": "server-connection-id-tests", + "collectionName": "coll", + "documents": [] + } + ], + "tests": [ + { + "description": "command events include server connection id", + "operations": [ + { + "name": "insertOne", + "object": "collection", + "arguments": { + "document": { + "x": 1 + } + } + }, + { + "name": "find", + "object": "collection", + "arguments": { + "filter": { + "$or": true + } + }, + "expectError": { + "isError": true + } + } + ], + "expectEvents": [ + { + "client": "client", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "hasServerConnectionId": true + } + }, + { + "commandSucceededEvent": { + "commandName": "insert", + "hasServerConnectionId": true + } + }, + { + "commandStartedEvent": { + "commandName": "find", + "hasServerConnectionId": true + } + }, + { + "commandFailedEvent": { + "commandName": "find", + "hasServerConnectionId": true + } + } + ] + } + ] + } + ] +} diff --git a/test/test_monitoring.py b/test/test_monitoring.py index 26a1da3f81..868078d5c8 100644 --- a/test/test_monitoring.py +++ b/test/test_monitoring.py @@ -1085,7 +1085,7 @@ def test_sensitive_commands(self): self.listener.reset() cmd = SON([("getnonce", 1)]) - listeners.publish_command_start(cmd, "pymongo_test", 12345, self.client.address) # type: ignore[arg-type] + listeners.publish_command_start(cmd, "pymongo_test", 12345, self.client.address, None) # type: ignore[arg-type] delta = datetime.timedelta(milliseconds=100) listeners.publish_command_success( delta, @@ -1093,6 +1093,7 @@ def test_sensitive_commands(self): "getnonce", 12345, self.client.address, # type: ignore[arg-type] + None, database_name="pymongo_test", ) started = self.listener.started_events[0] @@ -1161,7 +1162,7 @@ def test_command_event_repr(self): self.assertEqual( repr(event), "", + "command: 'ping', operation_id: 2, service_id: None, server_connection_id: None>", ) delta = datetime.timedelta(milliseconds=100) event = monitoring.CommandSucceededEvent( @@ -1171,7 +1172,7 @@ def test_command_event_repr(self): repr(event), "", + "service_id: None, server_connection_id: None>", ) event = monitoring.CommandFailedEvent( delta, {"ok": 0}, "ping", request_id, connection_id, operation_id, database_name=db_name @@ -1180,7 +1181,7 @@ def test_command_event_repr(self): repr(event), "", + "failure: {'ok': 0}, service_id: None, server_connection_id: None>", ) def test_server_heartbeat_event_repr(self): diff --git a/test/unified_format.py b/test/unified_format.py index 99758989c9..d4080bdc0b 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -779,6 +779,14 @@ def assertHasServiceId(self, spec, actual): else: self.test.assertIsNone(actual.service_id) + def assertHasServerConnectionId(self, spec, actual): + if "hasServerConnectionId" in spec: + if spec.get("hasServerConnectionId"): + self.test.assertIsNotNone(actual.server_connection_id) + self.test.assertIsInstance(actual.server_connection_id, int) + else: + self.test.assertIsNone(actual.server_connection_id) + def match_server_description(self, actual: ServerDescription, spec: dict) -> None: if "type" in spec: self.test.assertEqual(actual.server_type_name, spec["type"]) @@ -807,6 +815,7 @@ def match_event(self, event_type, expectation, actual): self.match_result(command, actual.command) self.assertHasDatabaseName(spec, actual) self.assertHasServiceId(spec, actual) + self.assertHasServerConnectionId(spec, actual) elif name == "commandSucceededEvent": self.test.assertIsInstance(actual, 
CommandSucceededEvent) reply = spec.get("reply") @@ -814,10 +823,12 @@ def match_event(self, event_type, expectation, actual): self.match_result(reply, actual.reply) self.assertHasDatabaseName(spec, actual) self.assertHasServiceId(spec, actual) + self.assertHasServerConnectionId(spec, actual) elif name == "commandFailedEvent": self.test.assertIsInstance(actual, CommandFailedEvent) self.assertHasServiceId(spec, actual) self.assertHasDatabaseName(spec, actual) + self.assertHasServerConnectionId(spec, actual) elif name == "poolCreatedEvent": self.test.assertIsInstance(actual, PoolCreatedEvent) elif name == "poolReadyEvent": From 0cc968c0297af02de0e2363b5a2498b068121340 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Mon, 4 Dec 2023 12:11:35 -0500 Subject: [PATCH 024/221] =?UTF-8?q?PYTHON-4072=20Add=20test=20decorator=20?= =?UTF-8?q?turning=20off=20test.test=5Fdatabase.TestDat=E2=80=A6=20(#1449)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/test_database.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/test_database.py b/test/test_database.py index 7635bab6c6..8fc8bff61e 100644 --- a/test/test_database.py +++ b/test/test_database.py @@ -375,6 +375,7 @@ def test_validate_collection(self): self.assertTrue(db.validate_collection(db.test, True, True)) @client_context.require_version_min(4, 3, 3) + @client_context.require_no_standalone def test_validate_collection_background(self): db = self.client.pymongo_test db.test.insert_one({"dummy": "object"}) From 704858ee3e01d2ba3aaca87b610c3d45524d3c15 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 4 Dec 2023 13:11:39 -0600 Subject: [PATCH 025/221] PYTHON-4045 Use PyPI Trusted Publishing (#1442) --- .github/workflows/release-python.yml | 152 +++++++++++++++++++++++++++ RELEASE.md | 78 +++++++++----- pyproject.toml | 14 +++ 3 files changed, 215 insertions(+), 29 deletions(-) create mode 100644 .github/workflows/release-python.yml diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml new file mode 100644 index 0000000000..334a4039b9 --- /dev/null +++ b/.github/workflows/release-python.yml @@ -0,0 +1,152 @@ +name: Python Wheels + +on: + push: + tags: + - "[0-9]+.[0-9]+.[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+.post[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+[a-b][0-9]+" + - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+" + workflow_dispatch: + +concurrency: + group: wheels-${{ github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash -eux {0} + +jobs: + build_wheels: + name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }} + runs-on: ${{ matrix.buildplat[0] }} + strategy: + # Ensure that a wheel builder finishes even if another fails + fail-fast: false + matrix: + # Github Actions doesn't support pairing matrix values together, let's improvise + # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 + buildplat: + - [ubuntu-20.04, manylinux_x86_64] + - [ubuntu-20.04, manylinux_aarch64] + - [ubuntu-20.04, manylinux_ppc64le] + - [ubuntu-20.04, manylinux_s390x] + - [ubuntu-20.04, manylinux_i686] + - [macos-11, macosx_*] + - [windows-2019, win_amd64] + - [windows-2019, win32] + python: ["cp37", "cp38", "cp39", "cp310", "cp311", "cp312"] + + steps: + - name: Checkout pymongo + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Set up python version + run: | + export PYTHON_VERSION=$(sed 's/^cp3/3./' <<< ${{ matrix.python }} ) + echo "PYTHON_VERSION=$PYTHON_VERSION" >> $GITHUB_ENV + + - uses: actions/setup-python@v4 + 
with: + python-version: ${{env.PYTHON_VERSION}} + cache: 'pip' + cache-dependency-path: 'pyproject.toml' + allow-prereleases: true + + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v2 + with: + platforms: all + + - name: Install cibuildwheel + run: python -m pip install "cibuildwheel>=2.4,<3" + + - name: Build MacOS Py37 Wheel + # Universal wheels are not supported with Python 3.7, so we explicitly + # produce an x86_64 wheel for Python 3.7 on MacOS. + if: ${{ matrix.python == 'cp37' && matrix.buildplat[0] == 'macos-11' }} + env: + CIBW_BUILD: cp37-macosx_x86_64 + CIBW_ARCHS: x86_64 + CIBW_TEST_COMMAND: "python {project}/tools/fail_if_no_c.py" + run: python -m cibuildwheel --output-dir wheelhouse + + - name: Build wheels + if: ${{ matrix.python != 'cp37' || matrix.buildplat[0] != 'macos-11' }} + env: + CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }} + run: python -m cibuildwheel --output-dir wheelhouse + + - uses: actions/upload-artifact@v3 + with: + name: wheel-${{ matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }} + path: ./wheelhouse/*.whl + if-no-files-found: error + + make_sdist: + name: Make SDist + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v4 + with: + # Build sdist on lowest supported Python + python-version: '3.7' + + - name: Build SDist + run: | + set -ex + python -m pip install -U pip build + python -m build --sdist . + + - name: Test SDist + run: | + python -m pip install dist/*.gz + cd .. + python -c "from pymongo import has_c; assert has_c()" + + - uses: actions/upload-artifact@v3 + with: + name: "sdist" + path: ./dist/*.tar.gz + + collect_dist: + runs-on: ubuntu-latest + needs: [build_wheels, make_sdist] + name: Download Wheels + steps: + - name: Download all workflow run artifacts + uses: actions/download-artifact@v3 + - name: Flatten directory + working-directory: . + run: | + find . -mindepth 2 -type f -exec mv {} . \; + find . -type d -empty -delete + - uses: actions/upload-artifact@v3 + with: + name: all-dist-${{ github.head_ref || github.ref_name }} + path: "./*" + + publish: + # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/#publishing-the-distribution-to-pypi + needs: [collect_dist] + if: startsWith(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + steps: + - name: Download all the dists + uses: actions/download-artifact@v3 + with: + name: all-dist-${{ github.head_ref || github.ref_name }} + path: dist/ + - name: Publish distribution 📦 to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/RELEASE.md b/RELEASE.md index 05d7c8d63d..3c2990df08 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -28,52 +28,41 @@ be removed in a release that changes the major version number. ## Doing a Release -1. PyMongo is tested on Evergreen. Ensure the latest commit are passing +1. PyMongo is tested on Evergreen. Ensure the latest commit are passing [CI](https://spruce.mongodb.com/commits/mongo-python-driver) as expected. -2. Check Jira to ensure all the tickets in this version have been +2. Check Jira to ensure all the tickets in this version have been completed. -3. Add release notes to `doc/changelog.rst`. Generally just +3. Make a PR that adds the release notes to `doc/changelog.rst`. 
Generally just summarize/clarify the git log, but you might add some more long form notes for big changes. -4. Make sure version number is updated in `pymongo/_version.py` +4. Merge the PR. -5. Commit with a BUMP version_number message, eg - `git commit -m 'BUMP 3.11.0'`. +5. Clone the source repository in a temporary directory and check out the + release branch. -6. Tag w/ version_number, eg, - `git tag -a '3.11.0' -m 'BUMP 3.11.0' `. +6. Update the version number in `pymongo/_version.py`. -7. Bump the version number to `.dev0` in - `pymongo/_version.py`, commit, push. - -8. Push commit / tag, eg `git push && git push --tags`. - -9. Pushing a tag will trigger a release process in Evergreen which - builds wheels for manylinux, macOS, and Windows. Wait for the - "release-combine" task to complete and then download the "Release - files all" archive. See https://spruce.mongodb.com/commits/mongo-python-driver?buildVariants=release&view=ALL +7. Commit the change, e.g. `git add . && git commit -m "BUMP "` - The contents should look like this: +7. Tag w/ version_number, eg, + `git tag -a '4.1.0' -m 'BUMP 4.1.0'`. - $ ls path/to/archive - pymongo--cp310-cp310-macosx_10_9_universal2.whl - ... - pymongo--cp38-cp38-manylinux2014_x86_64.whl - ... - pymongo--cp38-cp38-win_amd64.whl - ... - pymongo-.tar.gz +8. Bump the version number to `.dev0` in + `pymongo/_version.py`, commit, push. -10. Upload all the release packages to PyPI with twine: +9. Push commit / tag, eg `git push && git push --tags`. - $ python3 -m twine upload path/to/archive/* +10. Pushing a tag will trigger the release process on GitHub Actions + that will require a member of the team to authorize the deployment. + Navigate to https://github.com/mongodb/mongo-python-driver/actions/workflows/release-python.yml + and wait for the publish to complete. 11. Make sure the new version appears on `https://pymongo.readthedocs.io/en/stable/`. If the new version does not show - up automatically, trigger a rebuild of "latest" on https://readthedocs.org/projects/pymongo/builds/. + up automatically, trigger a rebuild of "stable" on https://readthedocs.org/projects/pymongo/builds/. 12. Publish the release version in Jira and add a description of the release, such as a the reason or the main feature. @@ -87,3 +76,34 @@ be removed in a release that changes the major version number. The title should be "PyMongo X.Y.Z", and the description should contain a link to the release notes on the the community forum, e.g. "Release notes: mongodb.com/community/forums/t/pymongo-4-0-2-released/150457" + +16. Wait for automated update PR on conda-forge, e.g.: https://github.com/conda-forge/pymongo-feedstock/pull/81 + Update dependencies if needed. + + +## Doing a Bug Fix Release + +1. If it is a new branch, first create the release branch and Evergreen project. + +- Clone the source repository in a temporary location. + +- Create a branch from the tag, e.g. `git checkout -b v4.1 4.1.0`. + +- Push the branch, e.g.: `git push origin v4.6`. + +- Create a new project in Evergreen for the branch by duplicating the "Mongo Python Driver" project. + Select the option to create a JIRA ticket for S3 bucket permissions. + +- Update the "Display Name", "Branch Name", and "Identifier". + +- Attach the project to the repository. + +- Wait for the JIRA ticket to be resolved and verify S3 upload capability with a patch release on the + new project. + +2. Create a PR against the release branch. + +3. 
Create a release using the "Doing a Release" checklist above, ensuring that you + check out the appropriate release branch in the source checkout. + +4. Cherry-pick the changelog PR onto the `master` branch. diff --git a/pyproject.toml b/pyproject.toml index db2c956690..5f75f2733f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -224,3 +224,17 @@ partial_branches = ["if (.*and +)*not _use_c( and.*)*:"] [tool.coverage.html] directory = "htmlcov" + +[tool.cibuildwheel] +skip = "pp* *-musllinux*" +build-frontend = "build" +test-command = "python {project}/tools/fail_if_no_c.py" + +[tool.cibuildwheel.linux] +archs = "x86_64 aarch64 ppc64le s390x i686" +manylinux-x86_64-image = "manylinux_2_28" + +[tool.cibuildwheel.macos] +archs = "universal2" +test-command = "ENSURE_UNIVERSAL2=1 python {project}/tools/fail_if_no_c.py" +test-skip = "*-macosx_universal2:arm64" From 988ce0df332fdbba5ae841066e0c156a150648a3 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Mon, 4 Dec 2023 17:51:40 -0500 Subject: [PATCH 026/221] PYTHON-4070 Add setuptools as dep for test target (#1451) --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index bbf2307d90..f08f0d6ac6 100644 --- a/tox.ini +++ b/tox.ini @@ -48,6 +48,8 @@ labels = # Use labels and -m instead of -e so that tox -m